tol-sdk 1.7.4__py3-none-any.whl → 1.7.5b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. tol/api_base/__init__.py +1 -0
  2. tol/api_base/blueprint.py +19 -8
  3. tol/{s3/data_upload/blueprint.py → api_base/data_upload.py} +21 -6
  4. tol/api_base/pipeline_steps.py +4 -4
  5. tol/api_client/api_datasource.py +8 -8
  6. tol/api_client/converter.py +38 -52
  7. tol/api_client/factory.py +21 -19
  8. tol/api_client/parser.py +138 -98
  9. tol/api_client/view.py +118 -43
  10. tol/core/__init__.py +2 -1
  11. tol/core/data_object.py +27 -9
  12. tol/core/data_object_converter.py +37 -2
  13. tol/core/factory.py +51 -62
  14. tol/core/validate.py +1 -0
  15. tol/ena/client.py +61 -10
  16. tol/ena/ena_datasource.py +16 -10
  17. tol/ena/ena_methods.py +33 -32
  18. tol/ena/parser.py +15 -2
  19. tol/flows/converters/__init__.py +2 -0
  20. tol/flows/converters/incoming_sample_to_ena_sample_converter.py +130 -0
  21. tol/flows/converters/incoming_sample_to_incoming_sample_with_lists_converter.py +46 -0
  22. tol/s3/__init__.py +0 -1
  23. tol/sql/model.py +1 -1
  24. tol/sql/pipeline_step/factory.py +1 -1
  25. tol/sql/sql_converter.py +7 -1
  26. tol/validators/__init__.py +12 -1
  27. tol/validators/allowed_keys.py +17 -12
  28. tol/validators/allowed_values.py +21 -63
  29. tol/validators/allowed_values_from_datasource.py +89 -0
  30. tol/validators/assert_on_condition.py +56 -0
  31. tol/validators/ena_checklist.py +73 -0
  32. tol/validators/ena_submittable.py +61 -0
  33. tol/validators/interfaces/__init__.py +5 -0
  34. tol/validators/interfaces/condition_evaluator.py +102 -0
  35. tol/validators/min_one_valid_value.py +55 -0
  36. tol/validators/mutually_exclusive.py +111 -0
  37. tol/validators/regex.py +30 -23
  38. tol/validators/regex_by_value.py +33 -33
  39. tol/validators/specimens_have_same_taxon.py +60 -0
  40. tol/validators/sts_fields.py +88 -0
  41. tol/validators/tolid.py +110 -0
  42. tol/validators/unique_values.py +25 -17
  43. tol/validators/unique_whole_organisms.py +109 -0
  44. {tol_sdk-1.7.4.dist-info → tol_sdk-1.7.5b2.dist-info}/METADATA +1 -1
  45. {tol_sdk-1.7.4.dist-info → tol_sdk-1.7.5b2.dist-info}/RECORD +49 -36
  46. tol/s3/data_upload/__init__.py +0 -3
  47. {tol_sdk-1.7.4.dist-info → tol_sdk-1.7.5b2.dist-info}/WHEEL +0 -0
  48. {tol_sdk-1.7.4.dist-info → tol_sdk-1.7.5b2.dist-info}/entry_points.txt +0 -0
  49. {tol_sdk-1.7.4.dist-info → tol_sdk-1.7.5b2.dist-info}/licenses/LICENSE +0 -0
  50. {tol_sdk-1.7.4.dist-info → tol_sdk-1.7.5b2.dist-info}/top_level.txt +0 -0
tol/api_client/parser.py CHANGED
@@ -6,12 +6,12 @@ from __future__ import annotations
6
6
 
7
7
  import typing
8
8
  from abc import ABC, abstractmethod
9
- from collections.abc import Mapping
10
- from typing import Any, Iterable, Optional
9
+ from collections.abc import Iterable
10
+ from typing import Any
11
11
 
12
12
  from dateutil.parser import parse as dateutil_parse
13
13
 
14
- from ..core import DataObject
14
+ from ..core import DataObject, ReqFieldsTree
15
15
 
16
16
  if typing.TYPE_CHECKING:
17
17
  from ..core import DataSource
@@ -27,23 +27,12 @@ class Parser(ABC):
27
27
  instances
28
28
  """
29
29
 
30
- def parse_iterable(
31
- self,
32
- transfers: Iterable[JsonApiResource]
33
- ) -> Iterable[DataObject]:
34
- """
35
- Parses an `Iterable` of JSON:API transfer resources
36
- """
37
-
38
- return (
39
- self.parse(t) for t in transfers
40
- )
41
-
42
30
  @abstractmethod
43
- def parse(self, transfer: JsonApiResource) -> DataObject:
31
+ def parse_json_doc(self, transfer: JsonApiDoc) -> Iterable[DataObject]:
44
32
  """
45
- Parses an individual JSON:API transfer resource to a
46
- `DataObject` instance
33
+ Parses a JSON:API document, which includes a `data` array and possibly
34
+ an `included` array of related objects, returning an list of
35
+ `DataObject`.
47
36
  """
48
37
 
49
38
  @abstractmethod
@@ -62,22 +51,75 @@ class Parser(ABC):
62
51
 
63
52
 
64
53
  class DefaultParser(Parser):
54
+ def __init__(
55
+ self,
56
+ data_source_dict: dict[str, DataSource],
57
+ requested_tree: ReqFieldsTree | None = None,
58
+ ) -> None:
59
+ self.__ds_dict = data_source_dict
60
+ self.__requested_tree = requested_tree
65
61
 
66
- def __init__(self, data_source_dict: dict[str, DataSource]) -> None:
67
- self.__dict = data_source_dict
68
-
69
- def parse(self, transfer: JsonApiResource) -> DataObject:
62
+ def parse_json_doc(
63
+ self,
64
+ transfer: JsonApiDoc,
65
+ ) -> Iterable[DataObject]:
66
+ data_objects = list(self.__parse_iterable(transfer['data']))
67
+ if tree := self.__requested_tree:
68
+ included = DataObjectCatalog(self.__parse_iterable(transfer.get('included')))
69
+ for obj in data_objects:
70
+ self.__link_related_obejcts(tree, included, obj)
71
+ return data_objects
72
+
73
+ def __link_related_obejcts(
74
+ self,
75
+ tree: ReqFieldsTree,
76
+ included: DataObjectCatalog,
77
+ data_object: DataObject,
78
+ ) -> None:
79
+ """
80
+ Using the `ReqFieldsTree` recursively replaces related stub
81
+ `DataObject`s with `DataObject`s from the `incldued`
82
+ `DataObjectCatalog` which were built from the JSON:API "included"
83
+ array.
84
+ """
85
+ for name, sub_tree in tree.sub_trees():
86
+ if name in tree.to_one_names():
87
+ if (related := data_object._to_one_objects.get(name)) and (
88
+ inc := included.fetch(related)
89
+ ):
90
+ # Link the to-one object
91
+ setattr(data_object, name, inc)
92
+ self.__link_related_obejcts(sub_tree, included, inc)
93
+ elif related := data_object._to_many_objects.get(name):
94
+ # Link each to-many object
95
+ for i, rel in enumerate(related):
96
+ if inc := included.fetch(rel):
97
+ related[i] = inc
98
+ self.__link_related_obejcts(sub_tree, included, inc)
99
+
100
+ def __parse_iterable(
101
+ self,
102
+ transfer: list[JsonApiResource],
103
+ ) -> Iterable[DataObject]:
104
+ if transfer:
105
+ if isinstance(transfer, list):
106
+ for json_res in transfer:
107
+ yield self.__parse(json_res)
108
+ else:
109
+ yield self.__parse(transfer)
110
+
111
+ def __parse(self, transfer: JsonApiResource) -> DataObject:
70
112
  type_ = transfer['type']
71
113
  ds = self.__get_data_source(type_)
72
- raw_attributes = transfer.get('attributes')
73
-
74
- attributes = self.__convert_attributes(type_, raw_attributes)
114
+ attributes = self.__convert_attributes(type_, transfer.get('attributes'))
115
+ to_one, to_many = self.__parse_relationships(transfer.get('relationships'))
75
116
 
76
117
  return ds.data_object_factory(
77
- transfer.get('type'),
118
+ type_,
78
119
  id_=transfer.get('id'),
79
120
  attributes=attributes,
80
- to_one=self.__parse_to_ones(transfer)
121
+ to_one=to_one,
122
+ to_many=to_many,
81
123
  )
82
124
 
83
125
  def parse_stats(self, transfer: JsonApiResource) -> dict:
@@ -90,73 +132,54 @@ class DefaultParser(Parser):
90
132
  type_ = transfer.get('type')
91
133
  raw_stats = transfer.get('stats')
92
134
 
93
- return [
94
- self.__convert_group_stats(type_, raw_stat)
95
- for raw_stat in raw_stats
96
- ]
135
+ return [self.__convert_group_stats(type_, raw_stat) for raw_stat in raw_stats]
97
136
 
98
137
  def __get_data_source(self, type_: str) -> DataSource:
99
- return self.__dict[type_]
100
-
101
- def __parse_to_ones(
102
- self,
103
- transfer: JsonApiResource
104
- ) -> dict[str, DataObject]:
105
-
106
- return {
107
- k: self.__parse_to_one(v)
108
- for k, v in transfer.get('relationships', {}).items()
109
- if self.__relationship_is_to_one(v)
110
- }
111
-
112
- def __parse_to_one(
113
- self,
114
- v: dict[str, Any] | None
115
- ) -> DataObject | None:
116
-
117
- if v is None:
118
- return None
119
- else:
120
- return self.parse(v.get('data', {}))
121
-
122
- def __relationship_is_to_one(
123
- self,
124
- relation: dict[str, Any] | None
125
- ) -> bool:
126
-
127
- if relation is None:
128
- return True
129
-
130
- return isinstance(
131
- relation.get('data'),
132
- Mapping
138
+ return self.__ds_dict[type_]
139
+
140
+ def __parse_relationships(
141
+ self, related: dict[str, JsonApiResource] | None
142
+ ) -> tuple[dict[str, DataObject | None], dict[str, list[DataObject]]]:
143
+ to_one = {}
144
+ to_many = {}
145
+ if related:
146
+ for name, value in related.items():
147
+ if value is None:
148
+ # This must be a to-one relation because to-many relations
149
+ # are never null. (If the to-many has been fetched it
150
+ # will be an empty list. If it has not been fetched it
151
+ # will be a dict containing a "links" key.)
152
+ to_one[name] = None
153
+ elif data := value.get('data'):
154
+ if isinstance(data, list):
155
+ to_many[name] = [self.__make_stub_data_object(x) for x in data]
156
+ else:
157
+ to_one[name] = None if data is None else self.__make_stub_data_object(data)
158
+ return to_one, to_many
159
+
160
+ def __make_stub_data_object(self, transfer: JsonApiResource):
161
+ type_ = transfer['type']
162
+ ds = self.__get_data_source(type_)
163
+ return ds.data_object_factory(
164
+ type_,
165
+ id_=transfer['id'],
166
+ stub=True,
133
167
  )
134
168
 
135
169
  def __convert_attributes(
136
- self,
137
- type_: str,
138
- attributes: Optional[dict[str, Any]]
170
+ self, type_: str, attributes: dict[str, Any] | None
139
171
  ) -> dict[str, Any]:
140
-
141
172
  if not attributes:
142
173
  return {}
143
174
 
144
175
  datetime_keys = self.__get_datetime_keys(type_)
145
176
 
146
177
  return {
147
- k: (
148
- dateutil_parse(v)
149
- if k in datetime_keys and v is not None
150
- else v
151
- )
178
+ k: (dateutil_parse(v) if k in datetime_keys and v is not None else v)
152
179
  for k, v in attributes.items()
153
180
  }
154
181
 
155
- def __convert_stats(
156
- self,
157
- type_: str,
158
- stats: Optional[dict[str, Any]]
159
- ) -> dict[str, Any]:
182
+ def __convert_stats(self, type_: str, stats: dict[str, Any] | None) -> dict[str, Any]:
160
183
  # {'field': {'min': value, 'max': value}
161
184
  if not stats:
162
185
  return {}
@@ -167,9 +190,7 @@ class DefaultParser(Parser):
167
190
  fieldname: {
168
191
  k: (
169
192
  dateutil_parse(v, ignoretz=True)
170
- if fieldname in datetime_keys
171
- and v is not None
172
- and k in ['min', 'max']
193
+ if fieldname in datetime_keys and v is not None and k in ['min', 'max']
173
194
  else v
174
195
  )
175
196
  for k, v in fieldstats.items()
@@ -178,11 +199,8 @@ class DefaultParser(Parser):
178
199
  }
179
200
 
180
201
  def __convert_group_stats(
181
- self,
182
- type_: str,
183
- raw_stats: dict[str, dict[str, Any]]
202
+ self, type_: str, raw_stats: dict[str, dict[str, Any]]
184
203
  ) -> dict[str, dict[str, Any]]:
185
-
186
204
  st = raw_stats.pop('stats')
187
205
  count = st.pop('count', None)
188
206
 
@@ -193,19 +211,41 @@ class DefaultParser(Parser):
193
211
 
194
212
  return raw_stats
195
213
 
196
- def __get_datetime_keys(self, type_: str) -> list[str]:
214
+ def __get_datetime_keys(self, type_: str) -> set[str]:
215
+ """
216
+ Gets called on each object, which is somewhat inefficient. Should be
217
+ cached for each object type, but don't want `self` in a cache because
218
+ it could leak memory.
219
+ """
197
220
  ds = self.__get_data_source(type_)
198
- attribute_types = ds.attribute_types.get(
199
- type_,
200
- {}
201
- )
221
+ attribute_types = ds.attribute_types.get(type_, {})
222
+
223
+ return {attr for attr, typ in attribute_types.items() if self.__type_is_datetime(typ)}
224
+
225
+ def __type_is_datetime(self, typ: str, /) -> bool:
226
+ lc_type = typ.lower()
227
+
228
+ return 'date' in lc_type or 'time' in lc_type
229
+
230
+
231
+ class DataObjectCatalog:
232
+ """
233
+ A catalog of `DataObject`s keyed by their `type` and `id` attributes.
234
+ """
235
+
236
+ def __init__(self, data_obj_list: Iterable[DataObject] | None):
237
+ self.__obj_index = {}
238
+ if data_obj_list:
239
+ for obj in data_obj_list:
240
+ self.store(obj)
202
241
 
203
- return [
204
- k for k, v in attribute_types.items()
205
- if self.__value_is_datetime(v)
206
- ]
242
+ def __len__(self):
243
+ return len(self.__obj_index)
207
244
 
208
- def __value_is_datetime(self, __v: str) -> bool:
209
- lower_ = __v.lower()
245
+ def store(self, obj) -> None:
246
+ key = obj.type, obj.id
247
+ self.__obj_index[key] = obj
210
248
 
211
- return 'date' in lower_ or 'time' in lower_
249
+ def fetch(self, obj) -> DataObject | None:
250
+ key = obj.type, obj.id
251
+ return self.__obj_index.get(key)
tol/api_client/view.py CHANGED
@@ -2,18 +2,21 @@
2
2
  #
3
3
  # SPDX-License-Identifier: MIT
4
4
 
5
+ from __future__ import annotations
6
+
5
7
  import urllib
6
8
  from abc import ABC, abstractmethod
9
+ from collections.abc import Iterable
7
10
  from datetime import date
8
- from typing import Any, Dict, Iterable, List, Optional, Union
11
+ from typing import Any
9
12
 
10
13
  from ..core import DataObject
11
14
  from ..core.requested_fields import ReqFieldsTree
12
15
 
13
- DocumentMeta = Dict[str, Any]
14
- DumpDict = Dict[str, Any]
15
- DumpDictMany = List[DumpDict]
16
- ResponseDict = Dict[str, Union[DumpDict, DumpDictMany]]
16
+ DocumentMeta = dict[str, Any]
17
+ DumpDict = dict[str, Any]
18
+ DumpDictMany = list[DumpDict]
19
+ ResponseDict = dict[str, DumpDict | DumpDictMany]
17
20
 
18
21
 
19
22
  class View(ABC):
@@ -26,7 +29,7 @@ class View(ABC):
26
29
  def dump(
27
30
  self,
28
31
  data_object: DataObject,
29
- document_meta: Optional[DocumentMeta] = None,
32
+ document_meta: DocumentMeta | None = None,
30
33
  ) -> ResponseDict:
31
34
  """
32
35
  Create a JSON:API response for an individual DataObject result
@@ -36,7 +39,7 @@ class View(ABC):
36
39
  def dump_bulk(
37
40
  self,
38
41
  data_objects: Iterable[DataObject],
39
- document_meta: Optional[DocumentMeta] = None,
42
+ document_meta: DocumentMeta | None = None,
40
43
  ) -> ResponseDict:
41
44
  """
42
45
  Create a JSON:API response for an Iterable of DataObject results
@@ -56,7 +59,7 @@ class DefaultView(View):
56
59
  self,
57
60
  requested_tree: ReqFieldsTree,
58
61
  prefix: str = '',
59
- hop_limit: Optional[int] = None,
62
+ hop_limit: int | None = None,
60
63
  ) -> None:
61
64
  """
62
65
  Args:
@@ -77,14 +80,17 @@ class DefaultView(View):
77
80
  def dump(
78
81
  self,
79
82
  data_object: DataObject,
80
- document_meta: Optional[DocumentMeta] = None,
83
+ document_meta: DocumentMeta | None = None,
81
84
  ) -> ResponseDict:
82
- response = {
83
- 'data': self.__dump_object(
84
- data_object,
85
- tree=self.__requested_tree,
86
- ),
87
- }
85
+ included = IncludedDumps()
86
+ dumped = self.__dump_object(
87
+ data_object,
88
+ included,
89
+ tree=self.__requested_tree,
90
+ )
91
+ response = {'data': dumped}
92
+ if included:
93
+ response['included'] = included.as_list()
88
94
  if document_meta is not None:
89
95
  response['meta'] = document_meta
90
96
  return response
@@ -92,16 +98,20 @@ class DefaultView(View):
92
98
  def dump_bulk(
93
99
  self,
94
100
  data_objects: Iterable[DataObject],
95
- document_meta: Optional[DocumentMeta] = None,
101
+ document_meta: DocumentMeta | None = None,
96
102
  ) -> ResponseDict:
103
+ included = IncludedDumps()
97
104
  dumped = [
98
105
  self.__dump_object(
99
106
  data_object,
107
+ included,
100
108
  tree=self.__requested_tree,
101
109
  )
102
110
  for data_object in data_objects
103
111
  ]
104
112
  response = {'data': dumped}
113
+ if included:
114
+ response['included'] = included.as_list()
105
115
  if document_meta is not None:
106
116
  response['meta'] = document_meta
107
117
  return response
@@ -109,14 +119,20 @@ class DefaultView(View):
109
119
  def __dump_object(
110
120
  self,
111
121
  data_object: DataObject,
122
+ included: IncludedDumps,
112
123
  tree: ReqFieldsTree,
113
124
  ) -> DumpDict:
114
- dump = {'type': data_object.type, 'id': data_object.id}
125
+ """
126
+ Returns a JSON:API resource object for the `data_object`, recursively
127
+ adding related objects as specified in the `tree: ReqFieldsTree`
128
+ argument. Related objects are accumulated in the `incldued` array.
129
+ """
130
+ dump = {'type': data_object.type, 'id': null_or_str(data_object.id)}
115
131
  # Stub trees are created by requested_fields paths ending in ".id"
116
132
  if not tree.is_stub:
117
133
  self.__add_attributes(data_object, dump, tree)
118
134
  if tree.has_relationships:
119
- self.__add_relationships(data_object, dump, tree)
135
+ self.__add_relationships(data_object, dump, included, tree)
120
136
  return dump
121
137
 
122
138
  def __add_attributes(
@@ -125,6 +141,10 @@ class DefaultView(View):
125
141
  dump: DumpDict,
126
142
  tree: ReqFieldsTree | None,
127
143
  ):
144
+ """
145
+ If attributes are specified in the `tree: ReqFieldsTree`, adds only
146
+ those to the dump. Default is to add all attribtues.
147
+ """
128
148
  if tree and (attr_names := tree.attribute_names):
129
149
  # Only add requested attributes
130
150
  dump['attributes'] = self.__convert_attributes(
@@ -138,55 +158,110 @@ class DefaultView(View):
138
158
  self,
139
159
  data_object: DataObject,
140
160
  dump: DumpDict,
161
+ included: IncludedDumps,
141
162
  tree: ReqFieldsTree | None = None,
142
163
  ) -> DumpDict:
143
164
  rel_dict = self.__dump_to_one_relationships(
144
- data_object, tree
145
- ) | self.__dump_to_many_relationships(data_object, tree)
165
+ data_object, included, tree
166
+ ) | self.__dump_to_many_relationships(data_object, included, tree)
146
167
  if rel_dict:
147
168
  dump['relationships'] = rel_dict
148
169
 
149
170
  def __dump_to_one_relationships(
150
171
  self,
151
172
  data_object: DataObject,
173
+ included: IncludedDumps,
152
174
  tree: ReqFieldsTree,
153
175
  ) -> RelationshipDump:
154
176
  to_ones = {}
155
- for name in tree.to_one_names():
156
- if name in data_object._to_one_objects:
177
+ for rel in tree.to_one_names():
178
+ if rel in data_object._to_one_objects:
157
179
  one_dump = None
158
- if one := data_object._to_one_objects.get(name):
159
- if sub_tree := tree.get_sub_tree(name):
160
- one_dump = {'data': self.__dump_object(one, tree=sub_tree)}
161
- else:
162
- one_dump = {'data': {'type': one.type, 'id': one.id}}
163
- to_ones[name] = one_dump
180
+ if one := data_object._to_one_objects.get(rel):
181
+ one_dump = {'data': self.__dump_stub(one, rel)}
182
+ if sub_tree := tree.get_sub_tree(rel):
183
+ included.add_dump(self.__dump_object(one, included, tree=sub_tree))
184
+ to_ones[rel] = one_dump
164
185
  return to_ones
165
186
 
166
187
  def __dump_to_many_relationships(
167
188
  self,
168
189
  data_object: DataObject,
190
+ included: IncludedDumps,
169
191
  tree: ReqFieldsTree,
170
192
  ) -> RelationshipDump:
171
- quoted_id = urllib.parse.quote(str(data_object.id), safe='')
193
+ oid = data_object.id
194
+ quoted_id = None if oid is None else urllib.parse.quote(str(oid), safe='')
172
195
  to_many = {}
173
- for name in tree.to_many_names():
174
- if sub_tree := tree.get_sub_tree(name):
175
- to_many[name] = {
176
- 'data': [
177
- self.__dump_object(x, tree=sub_tree) for x in getattr(data_object, name)
178
- ]
179
- }
180
- else:
181
- link = f'{self.__prefix}/{data_object.type}/{quoted_id}/{name}'
182
- to_many[name] = {'links': {'related': link}}
196
+ for rel in tree.to_many_names():
197
+ sub_tree = tree.get_sub_tree(rel)
198
+ if sub_tree and rel in data_object._to_many_objects:
199
+ many_obj = data_object._to_many_objects.get(rel)
200
+ to_many[rel] = [self.__dump_stub(x, rel) for x in many_obj]
201
+ for obj in many_obj:
202
+ included.add_dump(self.__dump_object(obj, included, sub_tree))
203
+ elif quoted_id:
204
+ link = f'{self.__prefix}/{data_object.type}/{quoted_id}/{rel}'
205
+ to_many[rel] = {'links': {'related': link}}
183
206
  return to_many
184
207
 
208
+ def __dump_stub(self, obj: DataObject, rel_name: str) -> dict[str, str]:
209
+ """
210
+ Create a stub JSON:API object, known in the JSON:API spec as
211
+ a "resource identifier object". Contains a sanity check for the `id`
212
+ attribute having a value. If we want to support, for example,
213
+ storing related objects with auto-incremented IDs, we will need to
214
+ implement creating `lid` local IDs for linking to resource objects in
215
+ the `included` array.
216
+ """
217
+ if obj.id is None:
218
+ msg = (
219
+ f"Cannot serialise '{obj.type}' object in relation"
220
+ f" '{rel_name}' because it has no `id` attribute"
221
+ )
222
+ raise ValueError(msg)
223
+ return {'type': obj.type, 'id': str(obj.id)}
224
+
185
225
  def __convert_attributes(self, attributes: dict[str, Any]) -> dict[str, Any]:
186
226
  return {k: self.__convert_value(v) for k, v in attributes.items()}
187
227
 
188
- def __convert_value(self, __v: Any) -> Any:
189
- if isinstance(__v, date):
228
+ def __convert_value(self, val: Any, /) -> Any:
229
+ if isinstance(val, date):
190
230
  # `datetime` is a subclass of `date`
191
- return __v.isoformat()
192
- return __v
231
+ return val.isoformat()
232
+ return val
233
+
234
+
235
+ def null_or_str(oid: Any, /):
236
+ """
237
+ Return `oid` as a string if it isn't `None`
238
+ """
239
+ return None if oid is None else str(oid)
240
+
241
+
242
+ class IncludedDumps:
243
+ """
244
+ Maintains objects to be returned in the JSON:API `included` list, indexed
245
+ by tuples of `(type, id)`.
246
+ """
247
+
248
+ def __init__(self):
249
+ self.__type_id: dict[tuple[str, str], DumpDict] = {}
250
+
251
+ def __len__(self):
252
+ """
253
+ Implemented so that an `IncludedDumps` object returns true in boolean
254
+ context when it has entries.
255
+ """
256
+ return len(self.__type_id)
257
+
258
+ def as_list(self):
259
+ return list(self.__type_id.values())
260
+
261
+ def add_dump(self, dump: DumpDict):
262
+ """
263
+ Add a new DumpDict to the collection.
264
+ """
265
+ key = dump['type'], dump['id']
266
+ if key not in self.__type_id:
267
+ self.__type_id[key] = dump
tol/core/__init__.py CHANGED
@@ -26,7 +26,8 @@ from .data_object import ( # noqa F401
26
26
  )
27
27
  from .data_object_converter import ( # noqa F401
28
28
  DataObjectToDataObjectOrUpdateConverter,
29
- DefaultDataObjectToDataObjectConverter
29
+ DefaultDataObjectToDataObjectConverter,
30
+ SanitisingConverter
30
31
  )
31
32
  from .factory import core_data_object # noqa F401
32
33
  from .http_client import HttpClient # noqa F401
tol/core/data_object.py CHANGED
@@ -6,8 +6,9 @@ from __future__ import annotations
6
6
 
7
7
  import typing
8
8
  from abc import ABC, abstractmethod
9
+ from collections.abc import Iterable
9
10
  from dataclasses import dataclass
10
- from typing import Any, Iterable, Optional, Protocol, Union
11
+ from typing import Any, Protocol
11
12
 
12
13
  if typing.TYPE_CHECKING:
13
14
  from .operator import Relational
@@ -44,7 +45,7 @@ class DataObject(_AnyKeyProtocol, ABC):
44
45
 
45
46
  @property
46
47
  @abstractmethod
47
- def id(self) -> Optional[str]: # noqa
48
+ def id(self) -> str | None: # noqa
48
49
  """
49
50
  A unique ID by which to identify this object within
50
51
  its type.
@@ -61,7 +62,7 @@ class DataObject(_AnyKeyProtocol, ABC):
61
62
 
62
63
  @property
63
64
  @abstractmethod
64
- def to_one_relationships(self) -> dict[str, Optional[DataObject]]:
65
+ def to_one_relationships(self) -> dict[str, DataObject | None]:
65
66
  """
66
67
  A dictionary of relationships, where this object refers to
67
68
  precisely one other.
@@ -77,20 +78,37 @@ class DataObject(_AnyKeyProtocol, ABC):
77
78
 
78
79
  @property
79
80
  @abstractmethod
80
- def _host(self) -> Union[DataSource, Relational]:
81
+ def _host(self) -> DataSource | Relational:
81
82
  """
82
83
  The DataSource instance that manages DataObject instances of this type
83
84
  """
84
85
 
85
86
  @property
86
87
  @abstractmethod
87
- def _to_one_objects(self) -> dict[str, Optional[DataObject]]:
88
+ def _to_one_objects(self) -> dict[str, DataObject | None]:
88
89
  """
89
- The name: attribute mapping for `DataObject`s set on this instance.
90
+ The name: attribute mapping for to-one `DataObject`s set on this
91
+ instance.
90
92
 
91
- N.B. - This is not equivalent to `to_one_relationships`, as that merges
92
- both set `DataObject` instances and fetched relations from the
93
- `DataSource`. Most users will not need (or want) to use this property.
93
+ Can used to inspect which relations are set on the object without
94
+ triggering auto-fetching of to-one related objects from the `_host`
95
+ `DataSource`.
96
+
97
+ Most users should use `to_one_relationships` instead.
98
+ """
99
+
100
+ @property
101
+ @abstractmethod
102
+ def _to_many_objects(self) -> dict[str, Iterable[DataObject]]:
103
+ """
104
+ The name: attribute mapping for to-many `DataObject`s set on this
105
+ instance.
106
+
107
+ Can used to inspect which relations are set on the object without
108
+ triggering auto-fetching of to-many related objects from the `_host`
109
+ `DataSource`.
110
+
111
+ Most users should use `to_many_relationships` instead.
94
112
  """
95
113
 
96
114
  def get_field_by_name(self, field_name: str) -> Any: