linkml-store 0.1.6__tar.gz → 0.1.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of linkml-store might be problematic.

Files changed (43)
  1. {linkml_store-0.1.6 → linkml_store-0.1.7}/PKG-INFO +1 -1
  2. {linkml_store-0.1.6 → linkml_store-0.1.7}/pyproject.toml +2 -1
  3. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/client.py +5 -1
  4. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/collection.py +58 -11
  5. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/config.py +4 -0
  6. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/database.py +77 -3
  7. linkml_store-0.1.7/src/linkml_store/api/stores/chromadb/__init__.py +3 -0
  8. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/chromadb/chromadb_collection.py +8 -1
  9. linkml_store-0.1.7/src/linkml_store/api/stores/duckdb/__init__.py +7 -0
  10. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/duckdb/duckdb_collection.py +5 -5
  11. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/duckdb/duckdb_database.py +3 -3
  12. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/hdf5/hdf5_collection.py +1 -1
  13. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/mongodb/mongodb_collection.py +8 -2
  14. linkml_store-0.1.7/src/linkml_store/api/stores/solr/__init__.py +3 -0
  15. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/solr/solr_collection.py +2 -1
  16. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/solr/solr_database.py +1 -0
  17. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/utils/object_utils.py +9 -1
  18. linkml_store-0.1.6/src/linkml_store/index/implementations/__init__.py +0 -0
  19. linkml_store-0.1.6/src/linkml_store/utils/__init__.py +0 -0
  20. {linkml_store-0.1.6 → linkml_store-0.1.7}/LICENSE +0 -0
  21. {linkml_store-0.1.6 → linkml_store-0.1.7}/README.md +0 -0
  22. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/__init__.py +0 -0
  23. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/__init__.py +0 -0
  24. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/queries.py +0 -0
  25. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/__init__.py +0 -0
  26. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/chromadb/chromadb_database.py +0 -0
  27. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/duckdb/mappings.py +0 -0
  28. {linkml_store-0.1.6/src/linkml_store/api/stores/chromadb → linkml_store-0.1.7/src/linkml_store/api/stores/hdf5}/__init__.py +0 -0
  29. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/hdf5/hdf5_database.py +0 -0
  30. {linkml_store-0.1.6/src/linkml_store/api/stores/duckdb → linkml_store-0.1.7/src/linkml_store/api/stores/mongodb}/__init__.py +0 -0
  31. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/mongodb/mongodb_database.py +0 -0
  32. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/solr/solr_utils.py +0 -0
  33. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/cli.py +0 -0
  34. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/constants.py +0 -0
  35. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/index/__init__.py +0 -0
  36. {linkml_store-0.1.6/src/linkml_store/api/stores/hdf5 → linkml_store-0.1.7/src/linkml_store/index/implementations}/__init__.py +0 -0
  37. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/index/implementations/llm_indexer.py +0 -0
  38. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/index/implementations/simple_indexer.py +0 -0
  39. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/index/indexer.py +0 -0
  40. {linkml_store-0.1.6/src/linkml_store/api/stores/mongodb → linkml_store-0.1.7/src/linkml_store/utils}/__init__.py +0 -0
  41. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/utils/format_utils.py +0 -0
  42. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/utils/io.py +0 -0
  43. {linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/utils/sql_utils.py +0 -0
{linkml_store-0.1.6 → linkml_store-0.1.7}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: linkml-store
-Version: 0.1.6
+Version: 0.1.7
 Summary: linkml-store
 License: MIT
 Author: Author 1
{linkml_store-0.1.6 → linkml_store-0.1.7}/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "linkml-store"
-version = "0.1.6"
+version = "0.1.7"
 description = "linkml-store"
 authors = ["Author 1 <author@org.org>"]
 license = "MIT"
@@ -36,6 +36,7 @@ sphinx = {version = ">=6.1.3"}
 sphinx-rtd-theme = {version = ">=1.0.0"}
 sphinx-autodoc-typehints = {version = "<2.0.0"}
 sphinx-click = {version = ">=4.3.0"}
+sphinx-automodapi = "*"
 myst-parser = {version = ">=0.18.1"}
 furo = {version = "*"}
 nbsphinx = "*"
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/client.py
@@ -21,7 +21,11 @@ HANDLE_MAP = {
 
 class Client:
     """
-    A client provides access to named collections.
+    A client is the top-level object for interacting with databases.
+
+    A client has access to one or more :class:`Database` objects.
+
+    Each database consists of a number of :class:`.Collection` objects.
 
     Examples
     --------
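For orientation, a minimal usage sketch of the client → database → collection layering described in the revised docstring. The "duckdb" handle string, the attach_database/create_collection calls, and the sample data are assumptions based on typical linkml-store usage, not part of this diff:

    from linkml_store import Client

    client = Client()
    # attach an in-memory DuckDB database and give it an alias (assumed handle syntax)
    db = client.attach_database("duckdb", alias="memory")
    # each database holds named collections of objects of a given type
    collection = db.create_collection("Person", alias="persons")
    collection.insert([{"id": "P1", "name": "Alice"}, {"id": "P2", "name": "Bob"}])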
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/collection.py
@@ -10,6 +10,7 @@ from linkml_runtime.linkml_model.meta import ArrayExpression
 from pydantic import BaseModel
 
 from linkml_store.index import get_indexer
+from linkml_store.utils.object_utils import clean_empties
 
 try:
     from linkml.validator.report import ValidationResult
@@ -61,14 +62,27 @@ class Collection:
 
     @property
     def name(self) -> str:
+        """
+        Return the name of the collection
+
+        :return:
+        """
         return self.metadata.name
 
     @property
     def hidden(self) -> bool:
+        """
+        True if the collection is hidden.
+
+        An example of a hidden collection is a collection that indexes another
+        collection
+
+        :return: True if the collection is hidden
+        """
         return self.metadata.hidden
 
     @property
-    def _target_class_name(self):
+    def target_class_name(self):
         """
         Return the name of the class that this collection represents
 
@@ -82,7 +96,7 @@ class Collection:
         return self.name
 
     @property
-    def _alias(self):
+    def alias(self):
         """
         Return the primary name/alias used for the collection.
 
@@ -156,7 +170,7 @@ class Collection:
         raise NotImplementedError
 
     def _create_query(self, **kwargs) -> Query:
-        return Query(from_table=self._alias, **kwargs)
+        return Query(from_table=self.alias, **kwargs)
 
     def query(self, query: Query, **kwargs) -> QueryResult:
         """
@@ -201,9 +215,28 @@ class Collection:
         :param kwargs:
         :return:
         """
-        id_field = self.identifier_field
-        q = self._create_query(where_clause={id_field: ids})
-        return self.query(q, **kwargs)
+        # TODO
+        id_field = self.identifier_attribute_name
+        return self.find({id_field: ids})
+
+    def get_one(self, id: IDENTIFIER, **kwargs) -> Optional[OBJECT]:
+        """
+        Get one object by ID.
+
+        :param id:
+        :param kwargs:
+        :return:
+        """
+        if not id:
+            raise ValueError("Must pass an ID")
+        id_field = self.identifier_attribute_name
+        if not id_field:
+            raise ValueError(f"No identifier for {self.name}")
+        w = {id_field: id}
+        qr = self.find(w)
+        if qr.num_rows == 1:
+            return qr.rows[0]
+        return None
 
     def find(self, where: Optional[Any] = None, **kwargs) -> QueryResult:
         """
@@ -216,6 +249,18 @@ class Collection:
         query = self._create_query(where_clause=where)
         return self.query(query, **kwargs)
 
+    def find_iter(self, where: Optional[Any] = None, **kwargs) -> Iterator[OBJECT]:
+        """
+        Find objects in the collection using a where query.
+
+        :param where:
+        :param kwargs:
+        :return:
+        """
+        qr = self.find(where=where, limit=-1, **kwargs)
+        for row in qr.rows:
+            yield row
+
     def search(
         self,
         query: str,
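find_iter wraps find with limit=-1 and yields rows one at a time; as written it iterates over the already-fetched qr.rows rather than streaming from the store. A sketch, continuing the client sketch above:

    # where-clause keys are attribute names matched for equality
    for person in collection.find_iter({"name": "Alice"}):
        print(person["id"])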
@@ -362,10 +407,11 @@ class Collection:
         """
         sv = self.parent.schema_view
         if sv:
-            cls = sv.get_class(self._target_class_name)
+            cls = sv.get_class(self.target_class_name)
             return cls
         return None
 
+    @property
     def identifier_attribute_name(self) -> Optional[str]:
         """
         Return the name of the identifier attribute for the collection.
@@ -376,7 +422,7 @@ class Collection:
         """
         cd = self.class_definition()
         if cd:
-            for att in cd.attributes.values():
+            for att in self.parent.schema_view.class_induced_slots(cd.name):
                 if att.identifier:
                     return att.name
         return None
@@ -411,7 +457,7 @@ class Collection:
         :param max_sample_size:
         :return:
         """
-        cd = ClassDefinition(self._target_class_name)
+        cd = ClassDefinition(self.target_class_name)
         keys = defaultdict(list)
         for obj in objs[0:max_sample_size]:
             if isinstance(obj, BaseModel):
@@ -474,7 +520,7 @@ class Collection:
                 array_expr = ArrayExpression(exact_number_dimensions=len(exact_dimensions_list[0]))
                 cd.attributes[k].array = array_expr
         sv = self.parent.schema_view
-        sv.schema.classes[self._target_class_name] = cd
+        sv.schema.classes[self.target_class_name] = cd
         sv.set_modified()
         return cd
 
@@ -511,8 +557,9 @@ class Collection:
         validator = Validator(self.parent.schema_view.schema, validation_plugins=validation_plugins)
         cd = self.class_definition()
         if not cd:
-            raise ValueError(f"Cannot find class definition for {self._target_class_name}")
+            raise ValueError(f"Cannot find class definition for {self.target_class_name}")
         class_name = cd.name
         result = self.find(**kwargs)
         for obj in result.rows:
+            obj = clean_empties(obj)
             yield from validator.iter_results(obj, class_name)
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/config.py
@@ -76,6 +76,10 @@ class DatabaseConfig(BaseModel):
         default=None,
         description="Optional configuration for search fields",
     )
+    ensure_referential_integrity: bool = Field(
+        default=False,
+        description="Whether to ensure referential integrity",
+    )
 
 
 class ClientConfig(BaseModel):
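A hedged sketch of enabling the new flag when building a database configuration programmatically; the handle field is an assumption about the existing DatabaseConfig model and is not shown in this diff:

    from linkml_store.api.config import DatabaseConfig

    db_config = DatabaseConfig(
        handle="duckdb:///:memory:",        # assumed existing field
        ensure_referential_integrity=True,  # new in 0.1.7, defaults to False
    )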
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/database.py
@@ -1,11 +1,12 @@
 import logging
 from abc import ABC
+from collections import defaultdict
 from copy import copy
 from pathlib import Path
 from typing import TYPE_CHECKING, ClassVar, Dict, Iterator, Optional, Sequence, Type, Union
 
 try:
-    from linkml.validator.report import ValidationResult
+    from linkml.validator.report import Severity, ValidationResult
 except ImportError:
     ValidationResult = None
 
@@ -204,7 +205,6 @@ class Database(ABC):
         """
         if not name:
             raise ValueError(f"Collection name must be provided: alias: {alias} metadata: {metadata}")
-        # collection_cls = self._collection_class
         collection_cls = self.collection_class
         collection = collection_cls(name=name, alias=alias, parent=self, metadata=metadata)
         if metadata and metadata.attributes:
@@ -341,14 +341,42 @@ class Database(ABC):
             self._schema_view = self.induce_schema_view()
         return self._schema_view
 
-    def set_schema_view(self, schema_view: SchemaView):
+    def set_schema_view(self, schema_view: Union[str, Path, SchemaView]):
         """
         Set the schema view for the database.
 
         :param schema_view:
         :return:
         """
+        if isinstance(schema_view, Path):
+            schema_view = str(schema_view)
+        if isinstance(schema_view, str):
+            schema_view = SchemaView(schema_view)
         self._schema_view = schema_view
+        if not self._collections:
+            return
+        # align with induced schema
+        roots = [c for c in schema_view.all_classes().values() if c.tree_root]
+        if len(roots) == 0:
+            all_ranges = set()
+            for cn in schema_view.all_classes():
+                for slot in schema_view.class_induced_slots(cn):
+                    if slot.range:
+                        all_ranges.add(slot.range)
+            roots = [
+                c
+                for c in schema_view.all_classes().values()
+                if not all_ranges.intersection(schema_view.class_ancestors(c.name, reflexive=True))
+            ]
+        if len(roots) == 1:
+            root = roots[0]
+            for slot in schema_view.class_induced_slots(root.name):
+                inlined = slot.inlined or slot.inlined_as_list
+                if inlined and slot.range:
+                    if slot.name in self._collections:
+                        coll = self._collections[slot.name]
+                        if not coll.metadata.type:
+                            coll.metadata.type = slot.range
 
     def load_schema_view(self, path: Union[str, Path]):
         """
@@ -397,6 +425,52 @@ class Database(ABC):
         """
         for collection in self.list_collections():
             yield from collection.iter_validate_collection(**kwargs)
+        if self.metadata.ensure_referential_integrity:
+            yield from self._validate_referential_integrity(**kwargs)
+
+    def _validate_referential_integrity(self, **kwargs) -> Iterator["ValidationResult"]:
+        """
+        Validate referential integrity of the database.
+
+        :param kwargs:
+        :return: iterator over validation results
+        """
+        sv = self.schema_view
+        cmap = defaultdict(list)
+        for collection in self.list_collections():
+            if not collection.target_class_name:
+                raise ValueError(f"Collection {collection.name} has no target class")
+            cmap[collection.target_class_name].append(collection)
+        for collection in self.list_collections():
+            cd = collection.class_definition()
+            induced_slots = sv.class_induced_slots(cd.name)
+            slot_map = {s.name: s for s in induced_slots}
+            # rmap = {s.name: s.range for s in induced_slots}
+            sr_to_coll = {s.name: cmap.get(s.range, []) for s in induced_slots if s.range}
+            for obj in collection.find_iter():
+                for k, v in obj.items():
+                    if k not in sr_to_coll:
+                        continue
+                    ref_colls = sr_to_coll[k]
+                    if not ref_colls:
+                        continue
+                    if not isinstance(v, (str, int)):
+                        continue
+                    slot = slot_map[k]
+                    found = False
+                    for ref_coll in ref_colls:
+                        ref_obj = ref_coll.get_one(v)
+                        if ref_obj:
+                            found = True
+                            break
+                    if not found:
+                        yield ValidationResult(
+                            type="ReferentialIntegrity",
+                            severity=Severity.ERROR,
+                            message=f"Referential integrity error: {slot.range} not found",
+                            instantiates=slot.range,
+                            instance=v,
+                        )
 
     def drop(self, **kwargs):
         """
linkml_store-0.1.7/src/linkml_store/api/stores/chromadb/__init__.py (new file)
@@ -0,0 +1,3 @@
+"""
+Support for ChromaDB is experimental.
+"""
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/chromadb/chromadb_collection.py
@@ -1,3 +1,7 @@
+"""
+ChromaDB Collection
+"""
+
 import logging
 from typing import Any, Dict, List, Optional, Tuple, Union
 
@@ -13,6 +17,9 @@ logger = logging.getLogger(__name__)
 
 
 class ChromaDBCollection(Collection):
+    """
+    A wrapper for ChromaDB collections.
+    """
 
     @property
     def native_collection(self) -> ChromaCollection:
@@ -50,7 +57,7 @@ class ChromaDBCollection(Collection):
         return len(ids)
 
     def delete_where(self, where: Optional[Dict[str, Any]] = None, missing_ok=True, **kwargs) -> int:
-        logger.info(f"Deleting from {self._target_class_name} where: {where}")
+        logger.info(f"Deleting from {self.target_class_name} where: {where}")
         if where is None:
             where = {}
         results = self.native_collection.get(where=where)
linkml_store-0.1.7/src/linkml_store/api/stores/duckdb/__init__.py (new file)
@@ -0,0 +1,7 @@
+from linkml_store.api.stores.duckdb.duckdb_collection import DuckDBCollection
+from linkml_store.api.stores.duckdb.duckdb_database import DuckDBDatabase
+
+__all__ = [
+    "DuckDBCollection",
+    "DuckDBDatabase",
+]
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/duckdb/duckdb_collection.py
@@ -28,7 +28,7 @@ class DuckDBCollection(Collection):
             cd = self.induce_class_definition_from_objects(objs)
         self._create_table(cd)
         table = self._sqla_table(cd)
-        logger.info(f"Inserting into: {self._alias} // T={table.name}")
+        logger.info(f"Inserting into: {self.alias} // T={table.name}")
         engine = self.parent.engine
         col_names = [c.name for c in table.columns]
         objs = [{k: obj.get(k, None) for k in col_names} for obj in objs]
@@ -55,12 +55,12 @@ class DuckDBCollection(Collection):
         return len(objs)
 
     def delete_where(self, where: Optional[Dict[str, Any]] = None, missing_ok=True, **kwargs) -> int:
-        logger.info(f"Deleting from {self._target_class_name} where: {where}")
+        logger.info(f"Deleting from {self.target_class_name} where: {where}")
         if where is None:
             where = {}
         cd = self.class_definition()
         if not cd:
-            logger.info(f"No class definition found for {self._target_class_name}, assuming not prepopulated")
+            logger.info(f"No class definition found for {self.target_class_name}, assuming not prepopulated")
             return 0
         table = self._sqla_table(cd)
         engine = self.parent.engine
@@ -115,7 +115,7 @@ class DuckDBCollection(Collection):
                 typ = sqla.ARRAY(typ, dimensions=1)
             col = Column(att.name, typ)
             cols.append(col)
-        t = Table(self._alias, metadata_obj, *cols)
+        t = Table(self.alias, metadata_obj, *cols)
         return t
 
     def _create_table(self, cd: ClassDefinition):
@@ -123,7 +123,7 @@ class DuckDBCollection(Collection):
             logger.info(f"Already have table for: {cd.name}")
             return
         query = Query(
-            from_table="information_schema.tables", where_clause={"table_type": "BASE TABLE", "table_name": self._alias}
+            from_table="information_schema.tables", where_clause={"table_type": "BASE TABLE", "table_name": self.alias}
         )
         qr = self.parent.query(query)
         if qr.num_rows > 0:
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/duckdb/duckdb_database.py
@@ -73,12 +73,12 @@ class DuckDBDatabase(Database):
         if sv:
             cd = None
             for c in self._collections.values():
-                if c.name == query.from_table:
+                if c.name == query.from_table or c.metadata.alias == query.from_table:
                     cd = c.class_definition()
                     break
             if cd:
-                for att in cd.attributes.values():
-                    if att.inlined:
+                for att in sv.class_induced_slots(cd.name):
+                    if att.inlined or att.inlined_as_list:
                         json_encoded_cols.append(att.name)
         with self.engine.connect() as conn:
             count_query_str = text(query_to_sql(query, count=True))
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/hdf5/hdf5_collection.py
@@ -46,7 +46,7 @@ class HDF5Collection(Collection):
         return count
 
     def delete_where(self, where: Optional[Dict[str, Any]] = None, missing_ok=True, **kwargs) -> int:
-        logger.info(f"Deleting from {self._target_class_name} where: {where}")
+        logger.info(f"Deleting from {self.target_class_name} where: {where}")
         if where is None:
             where = {}
         results = self.query(Query(where_clause=where)).rows
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/mongodb/mongodb_collection.py
@@ -1,4 +1,5 @@
 import logging
+from copy import copy
 from typing import Any, Dict, List, Optional, Tuple, Union
 
 from linkml_runtime.linkml_model import SlotDefinition
@@ -31,7 +32,12 @@ class MongoDBCollection(Collection):
         else:
             cursor = self.mongo_collection.find(mongo_filter)
 
-        rows = list(cursor)
+        def _as_row(row: dict):
+            row = copy(row)
+            del row["_id"]
+            return row
+
+        rows = [_as_row(row) for row in cursor]
         count = self.mongo_collection.count_documents(mongo_filter)
 
         return QueryResult(query=query, num_rows=count, rows=rows)
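The new _as_row helper copies each cursor document and strips MongoDB's internal _id key before rows are returned, so query results contain only schema-level attributes. A small standalone illustration of the same transformation, with a hypothetical document:

    from copy import copy

    doc = {"_id": "6630f0c2e13f4b1a9c000001", "id": "P1", "name": "Alice"}
    row = copy(doc)
    del row["_id"]   # what _as_row does per the hunk above
    print(row)       # {'id': 'P1', 'name': 'Alice'}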
@@ -92,7 +98,7 @@ class MongoDBCollection(Collection):
         return result.deleted_count
 
     def delete_where(self, where: Optional[Dict[str, Any]] = None, missing_ok=True, **kwargs) -> int:
-        logger.info(f"Deleting from {self._target_class_name} where: {where}")
+        logger.info(f"Deleting from {self.target_class_name} where: {where}")
         if where is None:
             where = {}
         result = self.mongo_collection.delete_many(where)
linkml_store-0.1.7/src/linkml_store/api/stores/solr/__init__.py (new file)
@@ -0,0 +1,3 @@
+"""
+Wrapper for Solr endpoints.
+"""
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/solr/solr_collection.py
@@ -5,6 +5,7 @@ from copy import copy
 from typing import Any, Dict, List, Optional, Union
 
 import requests
+
 from linkml_store.api import Collection
 from linkml_store.api.collection import DEFAULT_FACET_LIMIT
 from linkml_store.api.queries import Query, QueryResult
@@ -119,7 +120,7 @@ class SolrCollection(Collection):
         conditions = []
         if self.parent.metadata.collection_type_slot:
             where_clause = copy(where_clause)
-            where_clause[self.parent.metadata.collection_type_slot] = self._alias
+            where_clause[self.parent.metadata.collection_type_slot] = self.alias
         for field, value in where_clause.items():
             if not isinstance(value, (list, tuple)):
                 value = [value]
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/api/stores/solr/solr_database.py
@@ -2,6 +2,7 @@ import logging
 from typing import Optional
 
 import requests
+
 from linkml_store.api import Collection, Database
 from linkml_store.api.config import CollectionConfig
 from linkml_store.api.queries import Query, QueryResult
{linkml_store-0.1.6 → linkml_store-0.1.7}/src/linkml_store/utils/object_utils.py
@@ -1,6 +1,6 @@
 import json
 from copy import deepcopy
-from typing import Any, Dict, Union
+from typing import Any, Dict, List, Union
 
 from pydantic import BaseModel
 
@@ -71,3 +71,11 @@ def parse_update_expression(expr: str) -> Union[tuple[str, Any], None]:
     except ValueError:
         return None
     return path, val
+
+
+def clean_empties(value: Union[Dict, List]) -> Any:
+    if isinstance(value, dict):
+        value = {k: v for k, v in ((k, clean_empties(v)) for k, v in value.items()) if v is not None}
+    elif isinstance(value, list):
+        value = [v for v in (clean_empties(v) for v in value) if v is not None]
+    return value
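clean_empties recursively drops None values from dicts and lists; iter_validate_collection now applies it to each row before validation. Example behavior:

    from linkml_store.utils.object_utils import clean_empties

    obj = {"id": "P1", "name": "Alice", "age": None, "aliases": ["Al", None]}
    print(clean_empties(obj))
    # {'id': 'P1', 'name': 'Alice', 'aliases': ['Al']}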