linkml-store 0.1.6__py3-none-any.whl → 0.1.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (33)
  1. linkml_store/api/client.py +32 -3
  2. linkml_store/api/collection.py +231 -30
  3. linkml_store/api/config.py +10 -2
  4. linkml_store/api/database.py +305 -19
  5. linkml_store/api/stores/chromadb/__init__.py +7 -0
  6. linkml_store/api/stores/chromadb/chromadb_collection.py +8 -1
  7. linkml_store/api/stores/duckdb/__init__.py +16 -0
  8. linkml_store/api/stores/duckdb/duckdb_collection.py +11 -9
  9. linkml_store/api/stores/duckdb/duckdb_database.py +22 -8
  10. linkml_store/api/stores/duckdb/mappings.py +1 -0
  11. linkml_store/api/stores/filesystem/__init__.py +16 -0
  12. linkml_store/api/stores/filesystem/filesystem_collection.py +142 -0
  13. linkml_store/api/stores/filesystem/filesystem_database.py +36 -0
  14. linkml_store/api/stores/hdf5/__init__.py +7 -0
  15. linkml_store/api/stores/hdf5/hdf5_collection.py +1 -1
  16. linkml_store/api/stores/mongodb/__init__.py +25 -0
  17. linkml_store/api/stores/mongodb/mongodb_collection.py +29 -8
  18. linkml_store/api/stores/solr/__init__.py +3 -0
  19. linkml_store/api/stores/solr/solr_collection.py +2 -1
  20. linkml_store/api/stores/solr/solr_database.py +1 -0
  21. linkml_store/cli.py +64 -10
  22. linkml_store/index/__init__.py +6 -2
  23. linkml_store/index/implementations/llm_indexer.py +83 -5
  24. linkml_store/index/implementations/simple_indexer.py +2 -2
  25. linkml_store/index/indexer.py +32 -8
  26. linkml_store/utils/format_utils.py +52 -2
  27. linkml_store/utils/object_utils.py +9 -1
  28. {linkml_store-0.1.6.dist-info → linkml_store-0.1.8.dist-info}/METADATA +4 -1
  29. linkml_store-0.1.8.dist-info/RECORD +45 -0
  30. linkml_store-0.1.6.dist-info/RECORD +0 -41
  31. {linkml_store-0.1.6.dist-info → linkml_store-0.1.8.dist-info}/LICENSE +0 -0
  32. {linkml_store-0.1.6.dist-info → linkml_store-0.1.8.dist-info}/WHEEL +0 -0
  33. {linkml_store-0.1.6.dist-info → linkml_store-0.1.8.dist-info}/entry_points.txt +0 -0

linkml_store/index/implementations/llm_indexer.py
@@ -1,20 +1,34 @@
+import logging
+from pathlib import Path
 from typing import TYPE_CHECKING, List
 
 import numpy as np
 
+from linkml_store.api.config import CollectionConfig
 from linkml_store.index.indexer import INDEX_ITEM, Indexer
 
 if TYPE_CHECKING:
     import llm
 
 
+logger = logging.getLogger(__name__)
+
+
 class LLMIndexer(Indexer):
     """
-    A implementations index wraps the llm library
+    An indexer that wraps the llm library.
+
+    This indexer is used to convert text to vectors using the llm library.
+
+    >>> indexer = LLMIndexer(cached_embeddings_database="tests/input/llm_cache.db")
+    >>> vector = indexer.text_to_vector("hello")
     """
 
     embedding_model_name: str = "ada-002"
     _embedding_model: "llm.EmbeddingModel" = None
+    cached_embeddings_database: str = None
+    cached_embeddings_collection: str = None
+    cache_queries: bool = False
 
     @property
     def embedding_model(self):
@@ -24,21 +38,85 @@ class LLMIndexer(Indexer):
             self._embedding_model = llm.get_embedding_model(self.embedding_model_name)
         return self._embedding_model
 
-    def text_to_vector(self, text: str) -> INDEX_ITEM:
+    def text_to_vector(self, text: str, cache: bool = None, **kwargs) -> INDEX_ITEM:
         """
         Convert a text to an indexable object
 
+        >>> indexer = LLMIndexer(cached_embeddings_database="tests/input/llm_cache.db")
+        >>> vector = indexer.text_to_vector("hello")
+
         :param text:
         :return:
         """
-        return self.texts_to_vectors([text])[0]
+        return self.texts_to_vectors([text], cache=cache, **kwargs)[0]
 
-    def texts_to_vectors(self, texts: List[str]) -> List[INDEX_ITEM]:
+    def texts_to_vectors(self, texts: List[str], cache: bool = None, **kwargs) -> List[INDEX_ITEM]:
         """
         Use LLM to embed
 
+        >>> indexer = LLMIndexer(cached_embeddings_database="tests/input/llm_cache.db")
+        >>> vectors = indexer.texts_to_vectors(["hello", "goodbye"])
+
         :param texts:
         :return:
         """
-        embeddings = self.embedding_model.embed_multi(texts)
+        logging.info(f"Converting {len(texts)} texts to vectors")
+        model = self.embedding_model
+        if self.cached_embeddings_database and (cache is None or cache or self.cache_queries):
+            model_id = model.model_id
+            if not model_id:
+                raise ValueError("Model ID is required to cache embeddings")
+            db_path = Path(self.cached_embeddings_database)
+            coll_name = self.cached_embeddings_collection
+            if not coll_name:
+                coll_name = "all_embeddings"
+            from linkml_store import Client
+
+            embeddings_client = Client()
+            config = CollectionConfig(
+                name=coll_name,
+                type="Embeddings",
+                attributes={
+                    "text": {"range": "string"},
+                    "model_id": {"range": "string"},
+                    "embedding": {"range": "float", "array": {}},
+                },
+            )
+            embeddings_db = embeddings_client.get_database(f"duckdb:///{db_path}")
+            if coll_name in embeddings_db.list_collection_names():
+                # Load existing collection and use its model
+                embeddings_collection = embeddings_db.create_collection(coll_name, metadata=config)
+            else:
+                embeddings_collection = embeddings_db.create_collection(coll_name, metadata=config)
+            texts = list(texts)
+            embeddings = list([None] * len(texts))
+            uncached_texts = []
+            n = 0
+            for i in range(len(texts)):
+                # TODO: optimize this
+                text = texts[i]
+                logger.info(f"Looking for cached embedding for {text}")
+                r = embeddings_collection.find({"text": text, "model_id": model_id})
+                if r.num_rows:
+                    embeddings[i] = r.rows[0]["embedding"]
+                    n += 1
+                    logger.info("Found")
+                else:
+                    uncached_texts.append((text, i))
+                    logger.info("NOT Found")
+            logger.info(f"Found {n} cached embeddings")
+            if uncached_texts:
+                logger.info(f"Embedding {len(uncached_texts)} uncached texts")
+                uncached_texts, uncached_indices = zip(*uncached_texts)
+                uncached_embeddings = list(model.embed_multi(uncached_texts))
+                # TODO: combine into a single insert with multiple rows
+                for i, index in enumerate(uncached_indices):
+                    logger.debug(f"Indexing text at {i}")
+                    embeddings[index] = uncached_embeddings[i]
+                    embeddings_collection.insert(
+                        {"text": uncached_texts[i], "embedding": embeddings[index], "model_id": model_id}
+                    )
+        else:
+            logger.info(f"Embedding {len(texts)} texts")
+            embeddings = model.embed_multi(texts)
         return [np.array(v, dtype=float) for v in embeddings]
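
The cached-embedding path above keeps one row per (text, model_id) pair in a DuckDB-backed collection and only calls the llm library for cache misses. A minimal usage sketch, assuming a locally configured "ada-002" embedding model; the cache path below is illustrative, not part of the release:

    from linkml_store.index.implementations.llm_indexer import LLMIndexer

    # Illustrative cache location; any writable DuckDB path works the same way.
    indexer = LLMIndexer(
        embedding_model_name="ada-002",
        cached_embeddings_database="/tmp/llm_cache.db",
        cache_queries=False,  # search() passes cache=False, so query strings skip the cache
    )
    # The first call embeds via llm and inserts rows into the "all_embeddings" collection;
    # repeating the call with the same texts is then served from the cache.
    vectors = indexer.texts_to_vectors(["hello", "goodbye"])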

linkml_store/index/implementations/simple_indexer.py
@@ -15,7 +15,7 @@ class SimpleIndexer(Indexer):
     This uses a naive method to generate an index from text. It is not suitable for production use.
     """
 
-    def text_to_vector(self, text: str) -> INDEX_ITEM:
+    def text_to_vector(self, text: str, cache: bool = None, **kwargs) -> INDEX_ITEM:
         """
         This is a naive method purely for testing
 
@@ -39,5 +39,5 @@ class SimpleIndexer(Indexer):
 
         # Increment the count at the computed index
         vector[index] += 1.0
-        logger.info(f"Indexed text: {text} as {vector}")
+        logger.debug(f"Indexed text: {text} as {vector}")
         return vector

linkml_store/index/indexer.py
@@ -1,3 +1,5 @@
+import logging
+from enum import Enum
 from typing import Any, Callable, Dict, List, Optional, Tuple
 
 import numpy as np
@@ -5,6 +7,13 @@ from pydantic import BaseModel
 
 INDEX_ITEM = np.ndarray
 
+logger = logging.getLogger(__name__)
+
+
+class TemplateSyntaxEnum(str, Enum):
+    jinja2 = "jinja2"
+    fstring = "fstring"
+
 
 def cosine_similarity(vector1, vector2):
     dot_product = np.dot(vector1, vector2)
@@ -21,8 +30,9 @@ class Indexer(BaseModel):
     name: Optional[str] = None
     index_function: Optional[Callable] = None
     distance_function: Optional[Callable] = None
-    index_attributes: Optional[str] = None
+    index_attributes: Optional[List[str]] = None
     text_template: Optional[str] = None
+    text_template_syntax: Optional[TemplateSyntaxEnum] = None
     filter_nulls: Optional[bool] = True
     vector_default_length: Optional[int] = 1000
     index_field: Optional[str] = "__index__"
@@ -41,24 +51,25 @@
         Convert a list of objects to indexable objects
 
         :param objs:
-        :return:
+        :return: list of vectors
         """
-        return [self.object_to_vector(obj) for obj in objs]
+        return self.texts_to_vectors([self.object_to_text(obj) for obj in objs])
 
-    def texts_to_vectors(self, texts: List[str]) -> List[INDEX_ITEM]:
+    def texts_to_vectors(self, texts: List[str], cache: bool = None, **kwargs) -> List[INDEX_ITEM]:
         """
         Convert a list of texts to indexable objects
 
         :param texts:
         :return:
         """
-        return [self.text_to_vector(text) for text in texts]
+        return [self.text_to_vector(text, cache=cache, **kwargs) for text in texts]
 
-    def text_to_vector(self, text: str) -> INDEX_ITEM:
+    def text_to_vector(self, text: str, cache: bool = None, **kwargs) -> INDEX_ITEM:
         """
         Convert a text to an indexable object
 
         :param text:
+        :param cache:
         :return:
         """
         raise NotImplementedError
@@ -71,11 +82,24 @@
         :return:
         """
         if self.index_attributes:
+            if len(self.index_attributes) == 1 and not self.text_template:
+                return str(obj[self.index_attributes[0]])
             obj = {k: v for k, v in obj.items() if k in self.index_attributes}
         if self.filter_nulls:
             obj = {k: v for k, v in obj.items() if v is not None}
         if self.text_template:
-            return self.text_template.format(**obj)
+            syntax = self.text_template_syntax
+            if not syntax:
+                if "{%" in self.text_template or "{{" in self.text_template:
+                    logger.info("Detected Jinja2 syntax in text template")
+                    syntax = TemplateSyntaxEnum.jinja2
+            if syntax and syntax == TemplateSyntaxEnum.jinja2:
+                from jinja2 import Template
+
+                template = Template(self.text_template)
+                return template.render(**obj)
+            else:
+                return self.text_template.format(**obj)
         return str(obj)
 
     def search(
@@ -91,7 +115,7 @@
         """
 
         # Convert the query string to a vector
-        query_vector = self.text_to_vector(query)
+        query_vector = self.text_to_vector(query, cache=False)
 
         distances = []
 
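
Taken together, the list-valued index_attributes, the new text_template_syntax field, and the Jinja2 auto-detection in object_to_text decide how an object is flattened into the text that gets embedded. A small sketch using SimpleIndexer; the field values are illustrative only:

    from linkml_store.index.implementations.simple_indexer import SimpleIndexer

    indexer = SimpleIndexer(
        name="demo",
        index_attributes=["name", "description"],       # only these keys are kept
        text_template="{{ name }}: {{ description }}",  # "{{" triggers Jinja2 rendering
    )
    obj = {"name": "aspirin", "description": "an analgesic", "formula": "C9H8O4"}
    text = indexer.object_to_text(obj)    # -> "aspirin: an analgesic"
    vector = indexer.text_to_vector(text)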

linkml_store/utils/format_utils.py
@@ -4,13 +4,17 @@ import sys
 from enum import Enum
 from io import StringIO
 from pathlib import Path
-from typing import Any, Dict, List, Union
+from typing import Any, Dict, List, Optional, Union
 
 import yaml
 from pydantic import BaseModel
 
 
 class Format(Enum):
+    """
+    Supported generic file formats for loading and rendering objects.
+    """
+
     JSON = "json"
     JSONL = "jsonl"
     YAML = "yaml"
@@ -22,6 +26,10 @@ def load_objects(file_path: Union[str, Path], format: Union[Format, str] = None)
     """
     Load objects from a file in JSON, JSONLines, YAML, CSV, or TSV format.
 
+    >>> load_objects("tests/input/test_data/data.csv")
+    [{'id': '1', 'name': 'John', 'age': '30'},
+    {'id': '2', 'name': 'Alice', 'age': '25'}, {'id': '3', 'name': 'Bob', 'age': '35'}]
+
     :param file_path: The path to the file.
     :param format: The format of the file. Can be a Format enum or a string value.
     :return: A list of dictionaries representing the loaded objects.
@@ -57,10 +65,23 @@ def load_objects(file_path: Union[str, Path], format: Union[Format, str] = None)
     return objs
 
 
-def render_output(data: List[Dict[str, Any]], format: Union[Format, str] = Format.YAML) -> str:
+def render_output(data: Union[List[Dict[str, Any]], Dict[str, Any]], format: Union[Format, str] = Format.YAML) -> str:
     """
     Render output data in JSON, JSONLines, YAML, CSV, or TSV format.
 
+    >>> print(render_output([{"a": 1, "b": 2}, {"a": 3, "b": 4}], Format.JSON))
+    [
+      {
+        "a": 1,
+        "b": 2
+      },
+      {
+        "a": 3,
+        "b": 4
+      }
+    ]
+
+
     :param data: The data to be rendered.
     :param format: The desired output format. Can be a Format enum or a string value.
     :return: The rendered output as a string.
@@ -91,3 +112,32 @@ def render_output(data: List[Dict[str, Any]], format: Union[Format, str] = Forma
         return output.getvalue()
     else:
         raise ValueError(f"Unsupported output format: {format}")
+
+
+def guess_format(path: str) -> Optional[Format]:
+    """
+    Guess the format of a file based on its extension.
+
+    >>> guess_format("data.json")
+    <Format.JSON: 'json'>
+    >>> guess_format("data.jsonl")
+    <Format.JSONL: 'jsonl'>
+    >>> guess_format("data.yaml")
+    <Format.YAML: 'yaml'>
+    >>> assert not guess_format("data")
+
+    :param path: The path to the file.
+    :return: The guessed format.
+    """
+    if path.endswith(".json"):
+        return Format.JSON
+    elif path.endswith(".jsonl"):
+        return Format.JSONL
+    elif path.endswith(".yaml") or path.endswith(".yml"):
+        return Format.YAML
+    elif path.endswith(".tsv"):
+        return Format.TSV
+    elif path.endswith(".csv"):
+        return Format.CSV
+    else:
+        return None
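
guess_format rounds out load_objects and render_output, so a conversion can key entirely off file extensions. A short sketch; the file name is a placeholder:

    from linkml_store.utils.format_utils import guess_format, load_objects, render_output

    path = "people.csv"
    objs = load_objects(path, format=guess_format(path))  # Format.CSV inferred from the ".csv" suffix
    print(render_output(objs, format="yaml"))              # re-render the same rows as YAML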

linkml_store/utils/object_utils.py
@@ -1,6 +1,6 @@
 import json
 from copy import deepcopy
-from typing import Any, Dict, Union
+from typing import Any, Dict, List, Union
 
 from pydantic import BaseModel
 
@@ -71,3 +71,11 @@ def parse_update_expression(expr: str) -> Union[tuple[str, Any], None]:
     except ValueError:
         return None
     return path, val
+
+
+def clean_empties(value: Union[Dict, List]) -> Any:
+    if isinstance(value, dict):
+        value = {k: v for k, v in ((k, clean_empties(v)) for k, v in value.items()) if v is not None}
+    elif isinstance(value, list):
+        value = [v for v in (clean_empties(v) for v in value) if v is not None]
+    return value
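
clean_empties recursively strips None values from nested dicts and lists; containers that become empty are kept rather than removed. For example:

    from linkml_store.utils.object_utils import clean_empties

    obj = {"name": "x", "comment": None, "aliases": ["y", None], "extra": {"note": None}}
    print(clean_empties(obj))
    # {'name': 'x', 'aliases': ['y'], 'extra': {}}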

{linkml_store-0.1.6.dist-info → linkml_store-0.1.8.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: linkml-store
-Version: 0.1.6
+Version: 0.1.8
 Summary: linkml-store
 License: MIT
 Author: Author 1
@@ -27,6 +27,7 @@ Requires-Dist: click
 Requires-Dist: duckdb (>=0.10.1,<0.11.0)
 Requires-Dist: duckdb-engine (>=0.11.2)
 Requires-Dist: h5py ; extra == "h5py"
+Requires-Dist: jinja2 (>=3.1.4,<4.0.0)
 Requires-Dist: linkml ; extra == "validation"
 Requires-Dist: linkml-runtime (>=1.7.5,<2.0.0)
 Requires-Dist: linkml_map ; extra == "map"
@@ -53,3 +54,5 @@ There is also experimental support for vector-based indexing using OpenAI test e
 The goals of this project are to provide high level access to data stored in heterogeneous databases,
 with optional schema management using LinkML.
 
+See [these slides](https://docs.google.com/presentation/d/e/2PACX-1vSgtWUNUW0qNO_ZhMAGQ6fYhlXZJjBNMYT0OiZz8DDx8oj7iG9KofRs6SeaMXBBOICGknoyMG2zaHnm/embed?start=false&loop=false&delayms=3000) for more details
+

linkml_store-0.1.8.dist-info/RECORD
@@ -0,0 +1,45 @@
+linkml_store/__init__.py,sha256=jlU6WOUAn8cKIhzbTULmBTWpW9gZdEt7q_RI6KZN1bY,118
+linkml_store/api/__init__.py,sha256=3CelcFEFz0y3MkQAzhQ9JxHIt1zFk6nYZxSmYTo8YZE,226
+linkml_store/api/client.py,sha256=0Cs_0xW3NDUI6UJFtVvWd_XsGVLryMyh1SPrQhH4taU,8474
+linkml_store/api/collection.py,sha256=Nxauj22Vh38RaT6Hhwyn5j1_SXoHDf-EaoIy1osnIKo,24010
+linkml_store/api/config.py,sha256=VTtv42Vgnn3vKEERIOrtXjQDz4YHTJnm5pvvUnshqyA,3573
+linkml_store/api/database.py,sha256=OASaP8lbGIIJnrjTDDHq4ROzKrND8cizHauKMNFZaNo,24876
+linkml_store/api/queries.py,sha256=w0qnNeCH6pC9WTGoEQYd300MF6o0G3atz2YxN3WecAs,2028
+linkml_store/api/stores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+linkml_store/api/stores/chromadb/__init__.py,sha256=e9BkOPuPnVQKA5PRKDulag59yGNHDP3U2_DnPSrFAKM,132
+linkml_store/api/stores/chromadb/chromadb_collection.py,sha256=RQUZx5oeotkzNihg-dlSevkiTiKY1d9x0bS63HF80W4,4270
+linkml_store/api/stores/chromadb/chromadb_database.py,sha256=dZA3LQE8-ZMhJQOzsUFyxehnKpFF7adR182aggfkaFY,3205
+linkml_store/api/stores/duckdb/__init__.py,sha256=rbQSDgNg-fdvi6-pHGYkJTST4p1qXUZBf9sFSsO3KPk,387
+linkml_store/api/stores/duckdb/duckdb_collection.py,sha256=poKh6g9-sXA8Wl6B9-mxJqk9biJ0IK7_xz2iGG_U79g,6060
+linkml_store/api/stores/duckdb/duckdb_database.py,sha256=xk2bSeSWofGtPgp56168-4KIX4wJTtm9XgUtiBQOuNA,7194
+linkml_store/api/stores/duckdb/mappings.py,sha256=tDce3W1Apwammhf4LS6cRJ0m4NiJ0eB7vOI_4U5ETY8,148
+linkml_store/api/stores/filesystem/__init__.py,sha256=rbQSDgNg-fdvi6-pHGYkJTST4p1qXUZBf9sFSsO3KPk,387
+linkml_store/api/stores/filesystem/filesystem_collection.py,sha256=vIM8y71YNdjBsSUl02wXMdl6EY3QYjMCT5MzRI7pO04,5905
+linkml_store/api/stores/filesystem/filesystem_database.py,sha256=d-394QD565bHqJ2FzZjMZrFw8HY-v7k8bE2cxy24k-8,1098
+linkml_store/api/stores/hdf5/__init__.py,sha256=l4cIh3v7P0nPbwGIsfuCMD_serQ8q8c7iuUA9W2Jb4o,97
+linkml_store/api/stores/hdf5/hdf5_collection.py,sha256=mnpLMYehn3PuaIjp2dXrIWu8jh-bdQ84X2Ku83jMdEY,3805
+linkml_store/api/stores/hdf5/hdf5_database.py,sha256=EZbjrpaqiNDEFvoD5dZNcGBXA8z6HRNL81emueTZWNw,2714
+linkml_store/api/stores/mongodb/__init__.py,sha256=OSFCr7RQlDEe-O-Y0P_i912oAMK-L3pC7Cnj7sxlwAk,510
+linkml_store/api/stores/mongodb/mongodb_collection.py,sha256=wMaZb9B3PZ5lCDfj8kOrVnUJQ0O0lrP4PrcpEwAhEOI,4698
+linkml_store/api/stores/mongodb/mongodb_database.py,sha256=QAdTi8XYLsdrEvEUUKb9qolCPeEXgfecTQ1bz9GCWDg,3670
+linkml_store/api/stores/solr/__init__.py,sha256=aAfnaN9mZOiIDj1NYz0Ll9fZF2gG7UU_vhP4SNCL2d8,36
+linkml_store/api/stores/solr/solr_collection.py,sha256=8GmxErlWFOO0NnJiYo1Q7hegxCsfxaWc79eN7Jn02gA,4723
+linkml_store/api/stores/solr/solr_database.py,sha256=TFjqbY7jAkdrhAchbNg0E-mChSP7ogNwFExslbvX7Yo,2877
+linkml_store/api/stores/solr/solr_utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+linkml_store/cli.py,sha256=oMz13ndmFm3QCxg1ILPo0mlwtMgfd-tERXmW2LM7jzU,15871
+linkml_store/constants.py,sha256=x4ZmDsfE9rZcL5WpA93uTKrRWzCD6GodYXviVzIvR38,112
+linkml_store/index/__init__.py,sha256=dVrYtsFu7tEsMDaPMIc2LjsM76HoAKQz4uO8roR18Zw,993
+linkml_store/index/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+linkml_store/index/implementations/llm_indexer.py,sha256=04cz-UaKlV7WTubHVr0v94XfqbN0N2Nnoud9Z7nETH4,4779
+linkml_store/index/implementations/simple_indexer.py,sha256=KnkFJtXTHnwjhD_D6ZK2rFhBID1dgCedcOVPEWAY2NU,1282
+linkml_store/index/indexer.py,sha256=C64J3-2oCqYXCouNQ4fxw4g7f7Nl2L0WwFf-zrcsogo,4491
+linkml_store/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+linkml_store/utils/format_utils.py,sha256=18qmWVUr6ceOuvPROcNZ7mCBXpZyj3_cy1gNcpIqZC4,4352
+linkml_store/utils/io.py,sha256=JHUrWDtlZC2jtN_PQZ4ypdGIyYlftZEN3JaCvEPs44w,884
+linkml_store/utils/object_utils.py,sha256=is6T2gruvVKvWD5ZntcAl6Qi3L154FObEho_b_crTuE,2539
+linkml_store/utils/sql_utils.py,sha256=TeAhAHXi1GA0f2UVrxbzStwe49Q7fN0mu5WZyfDk-s8,5651
+linkml_store-0.1.8.dist-info/LICENSE,sha256=77mDOslUnalYnuq9xQYZKtIoNEzcH9mIjvWHOKjamnE,1086
+linkml_store-0.1.8.dist-info/METADATA,sha256=EN8yT2duWya5DiUQKHx99XAjCqJfYOw2_XTQiltTLzo,2311
+linkml_store-0.1.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+linkml_store-0.1.8.dist-info/entry_points.txt,sha256=6ema3OkAkUK0ux8roEeRPtSW_Tylend5BABf-xRsZiU,53
+linkml_store-0.1.8.dist-info/RECORD,,

linkml_store-0.1.6.dist-info/RECORD
@@ -1,41 +0,0 @@
-linkml_store/__init__.py,sha256=jlU6WOUAn8cKIhzbTULmBTWpW9gZdEt7q_RI6KZN1bY,118
-linkml_store/api/__init__.py,sha256=3CelcFEFz0y3MkQAzhQ9JxHIt1zFk6nYZxSmYTo8YZE,226
-linkml_store/api/client.py,sha256=hoiEE3FnNvCpvJ8LrEoTjOWg_jUolbbZEVIBef0Xv7c,7551
-linkml_store/api/collection.py,sha256=QKmiBTb9YjpRFgon1A2nurWQnIxU8uz9EPtvJT-paT8,17203
-linkml_store/api/config.py,sha256=qXB41OrPP3dh96XnigkJiTbAH12aPcKycSUH0MV8BHM,3269
-linkml_store/api/database.py,sha256=dXKImuXLOaBx4JiTDSuhCL6B4rOnIzY57Rg052Odupc,13701
-linkml_store/api/queries.py,sha256=w0qnNeCH6pC9WTGoEQYd300MF6o0G3atz2YxN3WecAs,2028
-linkml_store/api/stores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-linkml_store/api/stores/chromadb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-linkml_store/api/stores/chromadb/chromadb_collection.py,sha256=hiNhnUNKrsgjYZynXCkAbLssh38Xu9D59OX8ZARnWIU,4186
-linkml_store/api/stores/chromadb/chromadb_database.py,sha256=dZA3LQE8-ZMhJQOzsUFyxehnKpFF7adR182aggfkaFY,3205
-linkml_store/api/stores/duckdb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-linkml_store/api/stores/duckdb/duckdb_collection.py,sha256=CUfZAi2Z_uQ8DywXXEKu-Of3E_9i4GOvUVnq8qnhSew,5859
-linkml_store/api/stores/duckdb/duckdb_database.py,sha256=3Hz01lLm8-32blkITlTurF4fd4VHSAvyFKYA1v_9NfM,6591
-linkml_store/api/stores/duckdb/mappings.py,sha256=S4MWetLpQcxOwwedXrZTqazxdaHIQXXbq4VRq9Ok4B4,123
-linkml_store/api/stores/hdf5/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-linkml_store/api/stores/hdf5/hdf5_collection.py,sha256=B7kjbb9oOgxfp9KLP42PJXIyRlVoTuscn_OR-vNxyGc,3806
-linkml_store/api/stores/hdf5/hdf5_database.py,sha256=EZbjrpaqiNDEFvoD5dZNcGBXA8z6HRNL81emueTZWNw,2714
-linkml_store/api/stores/mongodb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-linkml_store/api/stores/mongodb/mongodb_collection.py,sha256=OW1IhDonKyG6jkb6Kyl0ZIBr1TYKw8iiXbx2dFrvQfk,3959
-linkml_store/api/stores/mongodb/mongodb_database.py,sha256=QAdTi8XYLsdrEvEUUKb9qolCPeEXgfecTQ1bz9GCWDg,3670
-linkml_store/api/stores/solr/solr_collection.py,sha256=pxT67C9OU6aaoshGZGU5ZZptLZStKOTLIBd5qGTpx2I,4723
-linkml_store/api/stores/solr/solr_database.py,sha256=7idTQL740r1wAvK21joUMz_ajM_qzLqdfPUADa7Fa7U,2876
-linkml_store/api/stores/solr/solr_utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-linkml_store/cli.py,sha256=r6UggDS2iFC5yIhBECwn-5aKmI38SHuYIJ5TEA0-CeM,13667
-linkml_store/constants.py,sha256=x4ZmDsfE9rZcL5WpA93uTKrRWzCD6GodYXviVzIvR38,112
-linkml_store/index/__init__.py,sha256=k3fq2gzhoBv3_QEu4zMbEEoc0tEOUOcrEjKQVvWfASs,881
-linkml_store/index/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-linkml_store/index/implementations/llm_indexer.py,sha256=LymhrQIeik0Qe8P2b3y2Lg7Y0P0OJ8_WjFwv-hbULj8,1069
-linkml_store/index/implementations/simple_indexer.py,sha256=CwUBb_GO_JUd-f-KW6R4i26PUV251BNTZ2cE6Qo1fH4,1251
-linkml_store/index/indexer.py,sha256=d_QEwJ5qEx2pkNR-QJ9hAn0pyJbhZIIT83GBFKhJA6I,3462
-linkml_store/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-linkml_store/utils/format_utils.py,sha256=lTrNVEywRziCjcrEeUC3hpZZo8yDQYgG-qBanQhBYrI,3081
-linkml_store/utils/io.py,sha256=JHUrWDtlZC2jtN_PQZ4ypdGIyYlftZEN3JaCvEPs44w,884
-linkml_store/utils/object_utils.py,sha256=8rKboygmwlk7tGgd2TffR7lfGrXXJcQ_hEpduKP_v2A,2214
-linkml_store/utils/sql_utils.py,sha256=TeAhAHXi1GA0f2UVrxbzStwe49Q7fN0mu5WZyfDk-s8,5651
-linkml_store-0.1.6.dist-info/LICENSE,sha256=77mDOslUnalYnuq9xQYZKtIoNEzcH9mIjvWHOKjamnE,1086
-linkml_store-0.1.6.dist-info/METADATA,sha256=NmD8dE9YlmNI6USIP1us-2sbc5yhtXRRYGjkhukScDQ,2064
-linkml_store-0.1.6.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-linkml_store-0.1.6.dist-info/entry_points.txt,sha256=6ema3OkAkUK0ux8roEeRPtSW_Tylend5BABf-xRsZiU,53
-linkml_store-0.1.6.dist-info/RECORD,,