tol_sdk-1.8.5-py3-none-any.whl → tol_sdk-1.8.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. tol/api_base/system.py +18 -0
  2. tol/api_client/api_datasource.py +23 -15
  3. tol/api_client/client.py +13 -8
  4. tol/api_client/factory.py +9 -12
  5. tol/benchling/benchling_warehouse_datasource.py +6 -3
  6. tol/benchling/sql/{extraction_containers_dna.sql → extraction_container_extraction_extraction_type_dna.sql} +25 -23
  7. tol/benchling/sql/sequencing_request_sequencing_platform_pacbio.sql +8 -8
  8. tol/core/factory.py +1 -1
  9. tol/core/requested_fields.py +21 -0
  10. tol/dummy/__init__.py +6 -0
  11. tol/dummy/client.py +88 -0
  12. tol/dummy/converter.py +48 -0
  13. tol/dummy/dummy_datasource.py +105 -0
  14. tol/dummy/factory.py +95 -0
  15. tol/dummy/parser.py +70 -0
  16. tol/flows/converters/__init__.py +5 -0
  17. tol/flows/converters/benchling_extraction_container_to_elastic_extraction_container_converter.py +53 -0
  18. tol/flows/converters/benchling_sequencing_request_to_elastic_sequencing_request_converter.py +8 -1
  19. tol/flows/converters/combine_fields_converter.py +45 -0
  20. tol/flows/converters/default_field_value_if_missing_converter.py +43 -0
  21. tol/flows/converters/elastic_sequencing_request_to_elastic_run_data_update_converter.py +8 -0
  22. tol/flows/converters/prefix_field_converter.py +49 -0
  23. tol/flows/converters/time_string_to_time.py +37 -28
  24. tol/sources/dummy.py +17 -0
  25. tol/sql/auth/blueprint.py +12 -5
  26. tol/sql/sql_datasource.py +1 -20
  27. tol/sql/standard/factory.py +2 -0
  28. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/METADATA +1 -1
  29. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/RECORD +33 -22
  30. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/WHEEL +1 -1
  31. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/entry_points.txt +0 -0
  32. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/licenses/LICENSE +0 -0
  33. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/top_level.txt +0 -0
tol/api_base/system.py CHANGED
@@ -2,6 +2,8 @@
 #
 # SPDX-License-Identifier: MIT
 
+import glob
+
 from flask import Blueprint
 
 
@@ -16,6 +18,7 @@ def system_blueprint(
     By default includes:
     - /healthz - used for health probes in kubernetes
     - /environment - indicates the deployment environment
+    - /package-versions - indicates the deployment package versions
 
     Parameters:
     - url_prefix - the prefix under which to serve this blueprint's
@@ -40,4 +43,19 @@ def system_blueprint(
             k: env_vars.get(v) for k, v in env_map.items()
         }, 200
 
+    @system_blueprint.route('/package-versions')
+    def package_versions():
+        glob_paths = glob.glob('./**/requirements.txt', recursive=True)
+        with open(glob_paths[0], 'r') as f:
+            requirements = f.readlines()
+            for requirement in requirements:
+                if 'tol-sdk' in requirement:
+                    tol_sdk_version = requirement.split('==')[1].strip()
+                    break
+            f.close()
+
+        result = {'tol_sdk': tol_sdk_version}
+
+        return result, 200
+
     return system_blueprint
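Note that the new handler assumes a requirements.txt with a pinned tol-sdk==<version> line exists somewhere under the working directory; if none is found, glob_paths[0] raises IndexError, and if no line pins tol-sdk, tol_sdk_version is unbound. A minimal smoke test of the route, where the host and any blueprint url_prefix are assumptions, not part of this diff:

    # Hypothetical check of the new endpoint; adjust host/prefix to your deployment.
    import requests

    resp = requests.get('http://localhost:5000/package-versions')
    resp.raise_for_status()
    print(resp.json())  # e.g. {'tol_sdk': '1.8.7'}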
tol/api_client/api_datasource.py CHANGED
@@ -35,6 +35,7 @@ from ..core.operator import (
     Upserter,
 )
 from ..core.relationship import RelationshipConfig
+from ..core.requested_fields import ReqFieldsTree, requested_fields_to_tree
 
 if typing.TYPE_CHECKING:
     from ..core.session import OperableSession
@@ -133,31 +134,34 @@ class ApiDataSource(
             for k, v in transfer.items()
         }
 
+    @requested_fields_to_tree
     @validate('detailGet')
     def get_by_id(
         self,
         object_type: str,
         object_ids: Iterable[str],
         session: Optional[OperableSession] = None,
-        requested_fields: list[str] | None = None,
+        requested_tree: ReqFieldsTree | None = None,
     ) -> Iterable[Optional[DataObject]]:
 
         client = self.__client_factory()
+
         json_responses = (
             client.get_detail(
                 object_type,
                 id_,
-                requested_fields=requested_fields,
+                requested_tree=requested_tree,
             )
             for id_ in object_ids
         )
-        json_converter = self.__jc_factory(object_type, requested_fields)
+        json_converter = self.__jc_factory(object_type, requested_tree)
         return (
             json_converter.convert(r)
             if r is not None else None
             for r in json_responses
         )
 
+    @requested_fields_to_tree
     @validate('listGet')
     def get_list_page(
         self,
@@ -167,7 +171,7 @@ class ApiDataSource(
         object_filters: Optional[DataSourceFilter] = None,
         sort_by: Optional[str] = None,
         session: Optional[OperableSession] = None,
-        requested_fields: list[str] | None = None,
+        requested_tree: ReqFieldsTree | None = None,
     ) -> tuple[Iterable[DataObject], int]:
 
         filter_string = self.__get_filter_string(object_filters)
@@ -177,28 +181,30 @@ class ApiDataSource(
             page_size,
             filter_string=filter_string,
             sort_string=sort_by,
-            requested_fields=requested_fields,
+            requested_tree=requested_tree,
         )
-        return self.__jc_factory(object_type, requested_fields).convert_list(transfer)
 
+        return self.__jc_factory(object_type, requested_tree).convert_list(transfer)
+
+    @requested_fields_to_tree
     def get_list(
         self,
         object_type: str,
         object_filters: Optional[DataSourceFilter] = None,
         session: Optional[OperableSession] = None,
-        requested_fields: list[str] | None = None,
+        requested_tree: ReqFieldsTree | None = None,
    ) -> Iterable[DataObject]:
        if self.__can_cursor(object_type, object_filters):
            return self._get_list_by_cursor(
                object_type,
                object_filters,
-                requested_fields=requested_fields,
+                requested_tree=requested_tree,
            )
        else:
            return self.__get_list_regular(
                object_type,
                object_filters,
-                requested_fields=requested_fields,
+                requested_tree=requested_tree,
            )
 
     @validate('count')
@@ -263,7 +269,7 @@ class ApiDataSource(
         object_filters: Optional[DataSourceFilter] = None,
         search_after: list[str] | None = None,
         session: Optional[OperableSession] = None,
-        requested_fields: list[str] | None = None
+        requested_tree: ReqFieldsTree | None = None,
     ) -> tuple[Iterable[DataObject], list[str] | None]:
 
         filter_string = self.__get_filter_string(object_filters)
@@ -272,9 +278,10 @@ class ApiDataSource(
             page_size,
             search_after,
             filter_string=filter_string,
-            requested_fields=requested_fields,
+            requested_tree=requested_tree,
         )
-        return self.__jc_factory(object_type, requested_fields).convert_cursor_page(transfer)
+
+        return self.__jc_factory(object_type, requested_tree).convert_cursor_page(transfer)
 
     @validate('delete')
     def delete(
@@ -436,13 +443,14 @@ class ApiDataSource(
         self,
         object_type: str,
         object_filters: Optional[DataSourceFilter],
-        requested_fields: list[str] | None = None,
+        requested_tree: ReqFieldsTree | None = None,
     ) -> Iterable[DataObject]:
 
         page = 1
         page_size = self.get_page_size()
         client = self.__client_factory()
-        jc_converter = self.__jc_factory(object_type, requested_fields)
+
+        jc_converter = self.__jc_factory(object_type, requested_tree)
         filter_string = self.__get_filter_string(object_filters)
 
         while True:
@@ -451,7 +459,7 @@ class ApiDataSource(
                 page,
                 page_size,
                 filter_string=filter_string,
-                requested_fields=requested_fields,
+                requested_tree=requested_tree,
             )
             (results_page, _) = jc_converter.convert_list(transfer)
tol/api_client/client.py CHANGED
@@ -11,6 +11,7 @@ from .converter import JsonApiTransfer, JsonRelationshipConfig
 from ..core import HttpClient
 from ..core.datasource_error import DataSourceError
 from ..core.operator import OperatorDict
+from ..core.requested_fields import ReqFieldsTree
 
 
 class JsonApiClient(HttpClient):
@@ -43,7 +44,7 @@ class JsonApiClient(HttpClient):
         self,
         object_type: str,
         object_id: str,
-        requested_fields: list[str] | None = None,
+        requested_tree: ReqFieldsTree | None = None,
     ) -> Optional[JsonApiTransfer]:
         """
         Gets a single JSON:API transfer for the object of specified
@@ -53,11 +54,13 @@ class JsonApiClient(HttpClient):
         url = self.__detail_url(object_type, object_id)
         headers = self._merge_headers()
 
+        params = {}
+        if requested_tree:
+            params['requested_fields'] = requested_tree.to_paths()
+
         return self.__fetch_detail(
             url,
-            params={
-                'requested_fields': requested_fields,
-            },
+            params=params,
             headers=headers,
         )
 
@@ -68,7 +71,7 @@ class JsonApiClient(HttpClient):
         page_size: int,
         filter_string: Optional[str] = None,
         sort_string: Optional[str] = None,
-        requested_fields: list[str] | None = None,
+        requested_tree: ReqFieldsTree | None = None,
     ) -> JsonApiTransfer:
         """
         Gets a (paged) list-JSON:API transfer for the objects of specified
@@ -81,7 +84,7 @@ class JsonApiClient(HttpClient):
             page_size=page_size,
             filter=filter_string,
             sort_by=sort_string,
-            requested_fields=requested_fields
+            requested_fields=requested_tree.to_paths() if requested_tree else None,
         )
         headers = self._merge_headers()
         return self.__fetch_list(
@@ -157,7 +160,7 @@ class JsonApiClient(HttpClient):
         page_size: int,
         search_after: list[str] | None,
         filter_string: Optional[str] = None,
-        requested_fields: list[str] | None = None,
+        requested_tree: ReqFieldsTree | None = None,
     ) -> JsonApiTransfer:
         """Cursor-pagination."""
 
@@ -165,7 +168,7 @@ class JsonApiClient(HttpClient):
         params = self.__no_none_value_dict(
             filter=filter_string,
             page_size=page_size,
-            requested_fields=requested_fields,
+            requested_fields=requested_tree.to_paths() if requested_tree else None,
         )
         headers = self._merge_headers()
         body = {'search_after': search_after}
@@ -462,5 +465,7 @@ class JsonApiClient(HttpClient):
         for k, v in kwargs.items():
             if v is None:
                 continue
+            if isinstance(v, list) and not v:
+                continue
             str_params[k] = ','.join([str(x) for x in v]) if isinstance(v, list) else str(v)
         return str_params
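The two lines added to __no_none_value_dict mean an empty list is now omitted from the query string rather than serialised as an empty value, presumably because requested_tree.to_paths() may now legitimately yield an empty list. A standalone sketch of the resulting behaviour (the method itself is private; this mirrors its logic):

    def no_none_value_dict(**kwargs) -> dict[str, str]:
        str_params = {}
        for k, v in kwargs.items():
            if v is None:
                continue
            if isinstance(v, list) and not v:
                continue  # empty list: drop the parameter entirely
            str_params[k] = ','.join(str(x) for x in v) if isinstance(v, list) else str(v)
        return str_params

    # no_none_value_dict(page_size=10, requested_fields=[]) -> {'page_size': '10'}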
tol/api_client/factory.py CHANGED
@@ -59,23 +59,20 @@ class _ConverterFactory:
     def json_converter_factory(
         self,
         object_type: str | None = None,
-        requested_fields: list[str] | None = None,
+        requested_tree: ReqFieldsTree | None = None,
     ) -> JsonConverterFactory:
         """
         Returns an instantiated `JsonApiConverter`.
         """
 
-        req_fields_tree = (
-            ReqFieldsTree(
-                object_type,
-                self.__data_source,
-                requested_fields=requested_fields,
-            )
-            if object_type
-            else None
-        )
-
-        parser = DefaultParser(self.__ds_dict, req_fields_tree)
+        # If we're going to parse a JSON:API data object (i.e. we have been
+        # passed an `object_type`), we need to add a ReqFieldsTree so that
+        # DataSources with default loading of to-one objects correctly
+        # process the to-ones from the `included` array.
+        if object_type and not requested_tree:
+            requested_tree = ReqFieldsTree(object_type, self.__data_source)
+
+        parser = DefaultParser(self.__ds_dict, requested_tree)
         return JsonApiConverter(parser)
 
     @property
tol/benchling/benchling_warehouse_datasource.py CHANGED
@@ -45,6 +45,7 @@ class BenchlingWarehouseDataSource(DataSource, ListGetter):
         'sample': 'sts_id',
         'sequencing_request': 'sanger_sample_id',
         'extraction': 'extraction_id',
+        'extraction_container': 'fluidx_container_id',
         'tissue_prep': 'eln_tissue_prep_id'
     }
 
@@ -70,7 +71,7 @@ class BenchlingWarehouseDataSource(DataSource, ListGetter):
             yield self.data_object_factory(
                 object_type,
                 id_=obj[id_col],
-                attributes=obj
+                attributes={k: v for k, v in obj.items() if k != id_col}
             )
 
     def get_list(
@@ -82,7 +83,9 @@ class BenchlingWarehouseDataSource(DataSource, ListGetter):
         file_suffix = ''
         if object_filters is not None:
             if isinstance(object_filters.and_, dict):
-                for field_name in ['sequencing_platform', 'extraction_type']:
+                for field_name in [
+                    'sequencing_platform', 'extraction_type', 'extraction.extraction_type'
+                ]:
                     if field_name in object_filters.and_:
                         # For an in_list, treat as multiple eq
                         if 'in_list' in object_filters.and_[field_name]:
@@ -101,7 +104,7 @@ class BenchlingWarehouseDataSource(DataSource, ListGetter):
                             ) for val in object_filters.and_[field_name]['in_list']['value']
                         )
                         elif 'eq' in object_filters.and_[field_name]:
-                            file_suffix = '_' + field_name + '_' \
+                            file_suffix = '_' + field_name.replace('.', '_') + '_' \
                                 + object_filters.and_[field_name]['eq']['value']
                         else:
                             raise DataSourceError('Filtering only on sequencing platform and extraction '
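The new dotted field name lines up with the SQL file renamed in this release: the suffix is built by flattening dots to underscores, so a filter on extraction.extraction_type selects the extraction_container_extraction_extraction_type_dna.sql query. A sketch of the mapping (the object-type-plus-suffix file naming is inferred from the suffix logic above):

    # Illustrative reconstruction of the file-name suffix computation.
    object_type = 'extraction_container'
    field_name = 'extraction.extraction_type'
    value = 'dna'

    file_suffix = '_' + field_name.replace('.', '_') + '_' + value
    print(f'{object_type}{file_suffix}.sql')
    # extraction_container_extraction_extraction_type_dna.sql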
tol/benchling/sql/{extraction_containers_dna.sql → extraction_container_extraction_extraction_type_dna.sql} RENAMED
@@ -9,28 +9,29 @@ Output: Table with columns:
 
 1) taxon_id: [character] Tissue metadata. Origin: STS
 2) eln_tissue_id: [character] Benchling ID for the tissue the extraction is derived from.
-3) eln_tissue_prep_id: [character] Benchling ID for the tissue prep the extraction is derived from.
-4) extraction_id: [character] DNA extraction entity ID (Benchling).
-5) programme_id: [character] ToLID. Origin: BWH.
-6) specimen_id: [character] Specimen ID. Origin: STS.
-7) creation_date: [date] Date the container was created.
-8) fluidx_container_id: [character] Primary key for the FluidX container.
-9) fluidx_id: [character] FluidX barcode.
-10) tube_type: [character] Type of tube/container.
-11) volume_ul: [numeric] Volume in microliters (0 if archived as 'Retired' or 'Expended').
-12) location: [character] Storage location name.
-13) rack: [character] Box/rack barcode.
-14) archive_purpose: [character] Reason for archiving the DNA extraction.
-15) nanodrop_concentration_ngul: [numeric] Latest Nanodrop concentration (ng/µL).
-16) dna_260_280_ratio: [numeric] Latest Nanodrop 260/280 ratio.
-17) dna_260_230_ratio: [numeric] Latest Nanodrop 260/230 ratio.
-18) qubit_concentration_ngul: [numeric] Latest Qubit concentration (ng/µL).
-19) yield_ng: [numeric] Latest yield (ng).
-20) femto_date_code: [character] Latest Femto date code.
-21) femto_description: [character] Latest Femto profile description.
-22) gqn_index: [numeric] Latest GQN index from Femto.
-23) next_step: [character] Latest decision making next step.
-24) extraction_qc_result: [character] Latest extraction QC result.
+3) tissue_sts_id: [character] STS ID for the tissue the extraction is derived from.
+4) eln_tissue_prep_id: [character] Benchling ID for the tissue prep the extraction is derived from.
+5) extraction_id: [character] DNA extraction entity ID (Benchling).
+6) programme_id: [character] ToLID. Origin: BWH.
+7) specimen_id: [character] Specimen ID. Origin: STS.
+8) creation_date: [date] Date the container was created.
+9) fluidx_container_id: [character] Primary key for the FluidX container.
+10) fluidx_id: [character] FluidX barcode.
+11) tube_type: [character] Type of tube/container.
+12) volume_ul: [numeric] Volume in microliters (0 if archived as 'Retired' or 'Expended').
+13) location: [character] Storage location name.
+14) rack: [character] Box/rack barcode.
+15) archive_purpose: [character] Reason for archiving the DNA extraction.
+16) nanodrop_concentration_ngul: [numeric] Latest Nanodrop concentration (ng/µL).
+17) dna_260_280_ratio: [numeric] Latest Nanodrop 260/280 ratio.
+18) dna_260_230_ratio: [numeric] Latest Nanodrop 260/230 ratio.
+19) qubit_concentration_ngul: [numeric] Latest Qubit concentration (ng/µL).
+20) yield_ng: [numeric] Latest yield (ng).
+21) femto_date_code: [character] Latest Femto date code.
+22) femto_description: [character] Latest Femto profile description.
+23) gqn_index: [numeric] Latest GQN index from Femto.
+24) next_step: [character] Latest decision making next step.
+25) extraction_qc_result: [character] Latest extraction QC result.
 
 NOTES:
 1) Only extractions from the 'ToL Core Lab' project and relevant folders are included.
@@ -109,6 +110,7 @@ latest_decision_making AS (
 SELECT DISTINCT
     t.taxon_id,
     t.id AS eln_tissue_id,
+    t.sts_id AS tissue_sts_id,
     tp.id AS eln_tissue_prep_id,
     dna.id AS extraction_id,
     t.programme_id,
@@ -170,4 +172,4 @@ LEFT JOIN entry$raw AS ent
 WHERE proj.name = 'ToL Core Lab'
     AND (f.name IN ('Routine Throughput', 'DNA', 'Core Lab Entities', 'Benchling MS Project Move') OR f.name IS NULL)
     AND (con.archive_purpose$ != ('Made in error') OR con.archive_purpose$ IS NULL)
-    AND ent.name NOT LIKE '%Nuclei isolation and tagmentation%'
+    AND COALESCE(ent.name, '') NOT LIKE '%Nuclei isolation and tagmentation%'
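The COALESCE change is a NULL-handling fix: for rows produced by the LEFT JOIN with no matching entry, ent.name is NULL, so ent.name NOT LIKE '...' evaluates to NULL rather than TRUE and those rows were silently excluded. Wrapping the column in COALESCE(ent.name, '') keeps containers that have no associated entry.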
tol/benchling/sql/sequencing_request_sequencing_platform_pacbio.sql CHANGED
@@ -24,7 +24,7 @@ Output: Table with cols:
 4) eln_file_registry_id: [character] id in Benchling Registry. Origin: BWH
 5) extraction_id: [character] Original DNA extract entity name. For pooled samples, the first DNA extract pooled. Origin: BWH
 6) submission_sample_name: [character] Entity name. Origin: BWH
-7) fluidx_id: [character] Container barcode of the DNA fluidx tube. Origin: BWH
+7) fluidx_container_id: [character] Container id of the DNA fluidx tube. Origin: BWH
 8) programme_id: [character] ToLID. Origin: BWH
 9) specimen_id: [character] Specimen ID. Origin: STS
 10) tube_name: [character] Name of the submission tube/container.
@@ -73,7 +73,7 @@ pacbio_submissions_container_routine AS (
     subsam.file_registry_id$ AS eln_file_registry_id,
     subsam.original_dna_extract AS extraction_id,
     subsam.name$ AS submission_sample_name,
-    c_dna.barcode AS fluidx_id,
+    c_dna.id AS fluidx_container_id,
     t.programme_id,
     t.specimen_id,
     con.name AS tube_name,
@@ -145,7 +145,7 @@ pacbio_submissions_container_pooled AS (
     subsam.file_registry_id$ AS eln_file_registry_id,
     subsam.pooled_sample AS extraction_id,
     subsam.name$ AS eln_submission_sample_name,
-    c_pool.barcode AS fluidx_id,
+    c_pool.id AS fluidx_container_id,
     t.programme_id,
     t.specimen_id,
     con.name AS tube_name,
@@ -219,7 +219,7 @@ pacbio_submissions_container_legacy_deprecated AS (
     subsam.file_registry_id$ AS eln_file_registry_id,
     subsam.original_dna_extract AS extraction_id,
     subsam.name$ AS submission_sample_name,
-    c_dna.barcode AS fluidx_id,
+    c_dna.id AS fluidx_container_id,
     t.programme_id,
     t.specimen_id,
     con.name AS tube_name,
@@ -283,7 +283,7 @@ pacbio_submissions_plate_automated_manifest AS (
     subsam.file_registry_id$ AS eln_file_registry_id,
     subsam.originaL_dna_extract AS extraction_id,
     subsam.name$ AS submission_sample_name,
-    c_dna.barcode AS fluidx_id,
+    c_dna.id AS fluidx_container_id,
     t.programme_id,
     t.specimen_id,
     con.name AS tube_name,
@@ -350,7 +350,7 @@ pacbio_submissions_plate_automated_manifest_pooled AS (
     subsam.file_registry_id$ AS eln_file_registry_id,
     subsam.pooled_sample AS extraction_id,
     subsam.name$ AS submission_sample_name,
-    c_pool.barcode AS fluidx_id,
+    c_pool.id AS fluidx_container_id,
     t.programme_id,
     t.specimen_id,
     con.name AS tube_name,
@@ -415,7 +415,7 @@ pacbio_submissions_plate_routine AS (
     subsam.file_registry_id$ AS eln_file_registry_id,
     subsam.original_dna_extract AS extraction_id,
     subsam.name$ AS submission_sample_name,
-    c_dna.barcode AS fluidx_id,
+    c_dna.id AS fluidx_container_id,
     t.programme_id,
     t.specimen_id,
     c_subsam.name AS tube_name,
@@ -485,7 +485,7 @@ pacbio_submissions_plate_routine_pooled AS (
     subsam.file_registry_id$ AS eln_file_registry_id,
     subsam.pooled_sample AS extraction_id,
     subsam.name$ AS submission_sample_name,
-    c_pool.barcode AS fluidx_id,
+    c_pool.id AS fluidx_container_id,
     t.programme_id,
     t.specimen_id,
     c_subsam.name AS tube_name,
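These seven barcode-to-id swaps all serve the same purpose: column 7 now carries the FluidX container's primary key rather than its barcode, matching the fluidx_container_id that BenchlingWarehouseDataSource now uses as the ID column for extraction_container objects, which appears to let sequencing requests be related to containers directly by ID.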
tol/core/factory.py CHANGED
@@ -189,7 +189,7 @@ def core_data_object(
         if object_from_datasource is not None:
             self.__type = object_from_datasource.type
             self.__attributes = object_from_datasource.attributes
-            self.__to_one_objects = object_from_datasource.to_one_objects
+            self.__to_one_objects = object_from_datasource._to_one_objects
             break
 
     @property
tol/core/requested_fields.py CHANGED
@@ -11,6 +11,27 @@ from .operator import Relational
 from .relationship import RelationshipConfig
 
 
+def requested_fields_to_tree(func):
+    """
+    Allows `requested_fields` keyword arguments to be supplied to methods if a
+    `requested_tree` object has not been given.
+    """
+
+    def wrapper(self, tablename, *args, **kwargs):
+        if 'requested_fields' in kwargs:
+            if 'requested_tree' in kwargs:
+                msg = 'Both requested_fields and requested_tree arguments given'
+                raise TypeError(msg)
+            flds = kwargs.pop('requested_fields')
+            if flds and tablename:
+                kwargs['requested_tree'] = ReqFieldsTree(
+                    tablename, self, requested_fields=flds
+                )
+        return func(self, tablename, *args, **kwargs)
+
+    return wrapper
+
+
 class ReqFieldsTree:
     """
     Acts as a template for which related objects and attributes to fetch from
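The decorator assumes the wrapped method's first positional argument is the object type (tablename) and that self can be handed to ReqFieldsTree as the data source. A usage sketch with illustrative names:

    # Illustrative only: any data-source method of the shape
    # (self, object_type, ...) can opt in.
    class MyDataSource:
        @requested_fields_to_tree
        def get_list(self, object_type, requested_tree=None):
            ...

    # ds.get_list('sample', requested_fields=['specimen_id']) is rewritten by
    # the wrapper into ds.get_list('sample', requested_tree=ReqFieldsTree(...)).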
tol/dummy/__init__.py ADDED
@@ -0,0 +1,6 @@
+# SPDX-FileCopyrightText: 2026 Genome Research Ltd.
+#
+# SPDX-License-Identifier: MIT
+
+from .dummy_datasource import DummyDataSource  # noqa
+from .factory import create_dummy_datasource  # noqa
tol/dummy/client.py ADDED
@@ -0,0 +1,88 @@
+# SPDX-FileCopyrightText: 2024 Genome Research Ltd.
+#
+# SPDX-License-Identifier: MIT
+
+import string
+from typing import Iterable, Optional
+
+from .converter import DummyTransfer
+from ..core import HttpClient
+
+
+class DummyClient(HttpClient):
+    """
+    Simulates dummy objects retrieval from a remote API.
+    """
+
+    def __init__(
+        self,
+    ) -> None:
+        super().__init__()
+
+    def get_detail(
+        self,
+        object_type: str,
+        object_ids: Iterable[str]
+    ) -> Optional[DummyTransfer]:
+        """
+        Gets a list of Dummy transfers for the objects of specified
+        `object_type` and `object_id`, or returns None if not found.
+        """
+        return self.__get_detail_data(object_type, object_ids)
+
+    def get_list(
+        self,
+        object_type: str
+    ) -> Optional[DummyTransfer]:
+        return self.__get_detail_data(object_type, range(10000))
+
+    def __get_detail_data(
+        self,
+        object_type: str,
+        object_ids: Iterable[str]
+    ) -> Optional[DummyTransfer]:
+        if object_type == 'category':
+            return self.__get_detail_category(object_type, object_ids)
+        return self.__get_detail_record(object_type, object_ids)
+
+    def __get_detail_record(
+        self,
+        object_type: str,
+        object_ids: Iterable[str]
+    ) -> Optional[DummyTransfer]:
+        """
+        Gets a list of dummy objects
+        """
+        ret = []
+        for object_id in object_ids:
+            if object_id < 10000:
+                ret.append({
+                    'id': object_id,
+                    'little_string': ['a', 'b', 'c'][int(object_id) % 3],
+                    'big_string': string.ascii_letters[int(object_id) % 52],
+                    'int': object_id,
+                    'bool': int(object_id) % 2 == 0,
+                    'date': f'2024-01-{(int(object_id) % 28) + 1:02d}',
+                    'type': object_type,
+                    'category': ['cat1', 'cat2', 'cat3', 'cat4'][int(object_id) % 4],
+                })
+            else:
+                ret.append(None)
+        return ret
+
+    def __get_detail_category(
+        self,
+        object_type: str,
+        object_ids: Iterable[str]
+    ) -> Optional[DummyTransfer]:
+        """
+        Gets a list of dummy objects
+        """
+        ret = []
+        for object_id in object_ids:
+            ret.append({
+                'id': object_id,
+                'name': object_id.upper(),
+                'type': object_type
+            })
+        return ret
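The generated attributes are deterministic functions of the integer ID (note the object_id < 10000 comparison requires numeric IDs, even though the signature annotates Iterable[str]). A quick sanity check, assuming the package imports resolve:

    client = DummyClient()
    (record,) = client.get_detail('record', [7])
    # {'id': 7, 'little_string': 'b', 'big_string': 'h', 'int': 7,
    #  'bool': False, 'date': '2024-01-08', 'type': 'record', 'category': 'cat4'}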
tol/dummy/converter.py ADDED
@@ -0,0 +1,48 @@
+# SPDX-FileCopyrightText: 2024 Genome Research Ltd.
+#
+# SPDX-License-Identifier: MIT
+
+from typing import Any, List, Optional
+
+from .parser import Parser
+from ..core import DataObject
+
+
+DummyObject = dict[str, Any]
+DummyTransfer = List[DummyObject]
+
+
+class DummyConverter():
+
+    """
+    Converts from Dummy transfers to instances of
+    `DataObject`.
+    """
+
+    def __init__(
+        self,
+        parser: Parser
+    ) -> None:
+
+        self.__parser = parser
+
+    def convert(self, input_: DummyTransfer) -> DataObject:
+        """
+        Converts a DummyTransfer containing a detail (single) result
+        """
+        return self.__parser.parse(input_)
+
+    def convert_list(
+        self,
+        input_: DummyTransfer
+    ) -> tuple[list[DataObject], Optional[int]]:
+        """
+        Converts a DummyTransfer containing a list of results. Also
+        returns a count of the total results meeting.
+        """
+
+        return [
+            self.__parser.parse(json_obj)
+            for json_obj in input_
+            if json_obj is not None
+        ], None