linkml-store 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. linkml_store/__init__.py +7 -0
  2. linkml_store/api/__init__.py +8 -0
  3. linkml_store/api/client.py +414 -0
  4. linkml_store/api/collection.py +1280 -0
  5. linkml_store/api/config.py +187 -0
  6. linkml_store/api/database.py +862 -0
  7. linkml_store/api/queries.py +69 -0
  8. linkml_store/api/stores/__init__.py +0 -0
  9. linkml_store/api/stores/chromadb/__init__.py +7 -0
  10. linkml_store/api/stores/chromadb/chromadb_collection.py +121 -0
  11. linkml_store/api/stores/chromadb/chromadb_database.py +89 -0
  12. linkml_store/api/stores/dremio/__init__.py +10 -0
  13. linkml_store/api/stores/dremio/dremio_collection.py +555 -0
  14. linkml_store/api/stores/dremio/dremio_database.py +1052 -0
  15. linkml_store/api/stores/dremio/mappings.py +105 -0
  16. linkml_store/api/stores/dremio_rest/__init__.py +11 -0
  17. linkml_store/api/stores/dremio_rest/dremio_rest_collection.py +502 -0
  18. linkml_store/api/stores/dremio_rest/dremio_rest_database.py +1023 -0
  19. linkml_store/api/stores/duckdb/__init__.py +16 -0
  20. linkml_store/api/stores/duckdb/duckdb_collection.py +339 -0
  21. linkml_store/api/stores/duckdb/duckdb_database.py +283 -0
  22. linkml_store/api/stores/duckdb/mappings.py +8 -0
  23. linkml_store/api/stores/filesystem/__init__.py +15 -0
  24. linkml_store/api/stores/filesystem/filesystem_collection.py +186 -0
  25. linkml_store/api/stores/filesystem/filesystem_database.py +81 -0
  26. linkml_store/api/stores/hdf5/__init__.py +7 -0
  27. linkml_store/api/stores/hdf5/hdf5_collection.py +104 -0
  28. linkml_store/api/stores/hdf5/hdf5_database.py +79 -0
  29. linkml_store/api/stores/ibis/__init__.py +5 -0
  30. linkml_store/api/stores/ibis/ibis_collection.py +488 -0
  31. linkml_store/api/stores/ibis/ibis_database.py +328 -0
  32. linkml_store/api/stores/mongodb/__init__.py +25 -0
  33. linkml_store/api/stores/mongodb/mongodb_collection.py +379 -0
  34. linkml_store/api/stores/mongodb/mongodb_database.py +114 -0
  35. linkml_store/api/stores/neo4j/__init__.py +0 -0
  36. linkml_store/api/stores/neo4j/neo4j_collection.py +429 -0
  37. linkml_store/api/stores/neo4j/neo4j_database.py +154 -0
  38. linkml_store/api/stores/solr/__init__.py +3 -0
  39. linkml_store/api/stores/solr/solr_collection.py +224 -0
  40. linkml_store/api/stores/solr/solr_database.py +83 -0
  41. linkml_store/api/stores/solr/solr_utils.py +0 -0
  42. linkml_store/api/types.py +4 -0
  43. linkml_store/cli.py +1147 -0
  44. linkml_store/constants.py +7 -0
  45. linkml_store/graphs/__init__.py +0 -0
  46. linkml_store/graphs/graph_map.py +24 -0
  47. linkml_store/index/__init__.py +53 -0
  48. linkml_store/index/implementations/__init__.py +0 -0
  49. linkml_store/index/implementations/llm_indexer.py +174 -0
  50. linkml_store/index/implementations/simple_indexer.py +43 -0
  51. linkml_store/index/indexer.py +211 -0
  52. linkml_store/inference/__init__.py +13 -0
  53. linkml_store/inference/evaluation.py +195 -0
  54. linkml_store/inference/implementations/__init__.py +0 -0
  55. linkml_store/inference/implementations/llm_inference_engine.py +154 -0
  56. linkml_store/inference/implementations/rag_inference_engine.py +276 -0
  57. linkml_store/inference/implementations/rule_based_inference_engine.py +169 -0
  58. linkml_store/inference/implementations/sklearn_inference_engine.py +314 -0
  59. linkml_store/inference/inference_config.py +66 -0
  60. linkml_store/inference/inference_engine.py +209 -0
  61. linkml_store/inference/inference_engine_registry.py +74 -0
  62. linkml_store/plotting/__init__.py +5 -0
  63. linkml_store/plotting/cli.py +826 -0
  64. linkml_store/plotting/dimensionality_reduction.py +453 -0
  65. linkml_store/plotting/embedding_plot.py +489 -0
  66. linkml_store/plotting/facet_chart.py +73 -0
  67. linkml_store/plotting/heatmap.py +383 -0
  68. linkml_store/utils/__init__.py +0 -0
  69. linkml_store/utils/change_utils.py +17 -0
  70. linkml_store/utils/dat_parser.py +95 -0
  71. linkml_store/utils/embedding_matcher.py +424 -0
  72. linkml_store/utils/embedding_utils.py +299 -0
  73. linkml_store/utils/enrichment_analyzer.py +217 -0
  74. linkml_store/utils/file_utils.py +37 -0
  75. linkml_store/utils/format_utils.py +550 -0
  76. linkml_store/utils/io.py +38 -0
  77. linkml_store/utils/llm_utils.py +122 -0
  78. linkml_store/utils/mongodb_utils.py +145 -0
  79. linkml_store/utils/neo4j_utils.py +42 -0
  80. linkml_store/utils/object_utils.py +190 -0
  81. linkml_store/utils/pandas_utils.py +93 -0
  82. linkml_store/utils/patch_utils.py +126 -0
  83. linkml_store/utils/query_utils.py +89 -0
  84. linkml_store/utils/schema_utils.py +23 -0
  85. linkml_store/utils/sklearn_utils.py +193 -0
  86. linkml_store/utils/sql_utils.py +177 -0
  87. linkml_store/utils/stats_utils.py +53 -0
  88. linkml_store/utils/vector_utils.py +158 -0
  89. linkml_store/webapi/__init__.py +0 -0
  90. linkml_store/webapi/html/__init__.py +3 -0
  91. linkml_store/webapi/html/base.html.j2 +24 -0
  92. linkml_store/webapi/html/collection_details.html.j2 +15 -0
  93. linkml_store/webapi/html/database_details.html.j2 +16 -0
  94. linkml_store/webapi/html/databases.html.j2 +14 -0
  95. linkml_store/webapi/html/generic.html.j2 +43 -0
  96. linkml_store/webapi/main.py +855 -0
  97. linkml_store-0.3.0.dist-info/METADATA +226 -0
  98. linkml_store-0.3.0.dist-info/RECORD +101 -0
  99. linkml_store-0.3.0.dist-info/WHEEL +4 -0
  100. linkml_store-0.3.0.dist-info/entry_points.txt +3 -0
  101. linkml_store-0.3.0.dist-info/licenses/LICENSE +22 -0
linkml_store/api/stores/mongodb/mongodb_collection.py
@@ -0,0 +1,379 @@
+ import logging
+ from copy import copy
+ from typing import Any, Dict, List, Optional, Tuple, Union
+
+ from pymongo.collection import Collection as MongoCollection
+
+ from linkml_store.api import Collection
+ from linkml_store.api.collection import DEFAULT_FACET_LIMIT, OBJECT
+ from linkml_store.api.queries import Query, QueryResult
+ from linkml_store.utils.object_utils import object_path_get
+
+ logger = logging.getLogger(__name__)
+
+
+ class MongoDBCollection(Collection):
+     """
+     Adapter for collections in a MongoDB database.
+
+     .. note::
+
+         You should not use or manipulate this class directly.
+         Instead, use the general :class:`linkml_store.api.Collection`.
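+
+     Example (an illustrative sketch; the handle and collection name are hypothetical)::
+
+         >>> from linkml_store import Client
+         >>> client = Client()
+         >>> db = client.attach_database("mongodb://localhost:27017/test")
+         >>> collection = db.get_collection("persons")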
+     """
+
+     @property
+     def mongo_collection(self) -> MongoCollection:
+         # collection_name = self.alias or self.name
+         collection_name = self.alias
+         if not collection_name:
+             raise ValueError("Collection name not set")
+         return self.parent.native_db[collection_name]
+
+     def _check_if_initialized(self) -> bool:
+         return self.alias in self.parent.native_db.list_collection_names()
+
+     def insert(self, objs: Union[OBJECT, List[OBJECT]], **kwargs):
+         if not isinstance(objs, list):
+             objs = [objs]
+         self.mongo_collection.insert_many(objs)
+         # TODO: allow mapping of _id to id for efficiency
+         for obj in objs:
+             del obj["_id"]
+         self._post_insert_hook(objs)
+
+     def index(
+         self,
+         objs: Union[OBJECT, List[OBJECT]],
+         index_name: Optional[str] = None,
+         replace: bool = False,
+         unique: bool = False,
+         **kwargs,
+     ):
+         """
+         Create indexes on the collection.
+
+         :param objs: Field(s) to index.
+         :param index_name: Optional name for the index.
+         :param replace: If True, the index will be dropped and recreated.
+         :param unique: If True, creates a unique index (default: False).
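+
+         Example (a hedged sketch; the field name is illustrative)::
+
+             >>> collection.index("name", index_name="name_index", unique=False)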
+         """
+
+         if not isinstance(objs, list):
+             objs = [objs]
+
+         existing_indexes = self.mongo_collection.index_information()
+
+         for obj in objs:
+             field_exists = False
+             index_to_drop = None
+
+             # Extract existing index details
+             for index_name_existing, index_details in existing_indexes.items():
+                 indexed_fields = [field[0] for field in index_details.get("key", [])]  # Extract field names
+
+                 if obj in indexed_fields:  # If this field is already indexed
+                     field_exists = True
+                     index_to_drop = index_name_existing if replace else None
+
+             # Drop the index if replace=True and index_to_drop is valid
+             if index_to_drop:
+                 self.mongo_collection.drop_index(index_to_drop)
+                 logger.debug(f"Dropped existing index: {index_to_drop}")
+
+             # Create the new index only if it doesn't exist or was dropped
+             if not field_exists or replace:
+                 self.mongo_collection.create_index(obj, name=index_name, unique=unique)
+                 logger.debug(f"Created new index: {index_name} on field {obj}, unique={unique}")
+             else:
+                 logger.debug(f"Index already exists for field {obj}, skipping creation.")
+
+     def upsert(
+         self,
+         objs: Union[OBJECT, List[OBJECT]],
+         filter_fields: List[str],
+         update_fields: Optional[List[str]] = None,
+         **kwargs,
+     ):
+         """
+         Upsert one or more documents into the MongoDB collection.
+
+         :param objs: The document(s) to insert or update.
+         :param filter_fields: List of field names to use as the filter for matching existing documents.
+         :param update_fields: List of field names to include in the update. If None, all fields are updated.
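+
+         Example (a hedged sketch; document and field names are illustrative)::
+
+             >>> collection.upsert({"id": "P1", "name": "Alice"}, filter_fields=["id"])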
+         """
+         if not isinstance(objs, list):
+             objs = [objs]
+
+         for obj in objs:
+             # Ensure filter fields exist in the object
+             filter_criteria = {field: obj[field] for field in filter_fields if field in obj}
+             if not filter_criteria:
+                 raise ValueError("At least one valid filter field must be present in each object.")
+
+             # Check if a document already exists
+             existing_doc = self.mongo_collection.find_one(filter_criteria)
+
+             if existing_doc:
+                 # Update only changed fields; if update_fields is None, consider all fields
+                 fields_to_update = update_fields if update_fields is not None else list(obj.keys())
+                 updates = {
+                     key: obj[key] for key in fields_to_update if key in obj and obj[key] != existing_doc.get(key)
+                 }
+
+                 if updates:
+                     self.mongo_collection.update_one(filter_criteria, {"$set": updates})
+                     logger.debug(f"Updated existing document: {filter_criteria} with {updates}")
+                 else:
+                     logger.debug(f"No changes detected for document: {filter_criteria}. Skipping update.")
+             else:
+                 # Insert a new document
+                 self.mongo_collection.insert_one(obj)
+                 logger.debug(f"Inserted new document: {obj}")
+
+     def query(self, query: Query, limit: Optional[int] = None, offset: Optional[int] = None, **kwargs) -> QueryResult:
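+         """
+         Execute a query against this collection.
+
+         If ``query.select_cols`` is set, a MongoDB projection is applied; nested
+         column paths (dot or bracket notation) are resolved per row via
+         :func:`object_path_get`.
+         """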
+         mongo_filter = self._build_mongo_filter(query.where_clause)
+         limit = limit or query.limit
+
+         # Build projection if select_cols are provided
+         projection = None
+         if query.select_cols:
+             projection = {"_id": 0}
+             for col in query.select_cols:
+                 projection[col] = 1
+
+         cursor = self.mongo_collection.find(mongo_filter, projection)
+         if limit and limit >= 0:
+             cursor = cursor.limit(limit)
+         offset = offset or query.offset
+         if offset and offset >= 0:
+             cursor = cursor.skip(offset)
+
+         select_cols = query.select_cols
+
+         def _as_row(row: dict):
+             row = copy(row)
+             if "_id" in row:
+                 del row["_id"]
+
+             if select_cols:
+                 # For nested fields, ensure we handle them properly
+                 result = {}
+                 for col in select_cols:
+                     # If it's a nested field (contains dots)
+                     if "." in col or "[" in col:
+                         result[col] = object_path_get(row, col)
+                     elif col in row:
+                         result[col] = row[col]
+                 return result
+             return row
+
+         rows = [_as_row(row) for row in cursor]
+         count = self.mongo_collection.count_documents(mongo_filter)
+
+         return QueryResult(query=query, num_rows=count, rows=rows)
+
+     def _build_mongo_filter(self, where_clause: Dict[str, Any]) -> Dict[str, Any]:
+         mongo_filter = {}
+         if where_clause:
+             for field, value in where_clause.items():
+                 mongo_filter[field] = value
+         return mongo_filter
+
+     def query_facets(
+         self,
+         where: Optional[Dict] = None,
+         facet_columns: Optional[List[Union[str, Tuple[str, ...]]]] = None,
+         facet_limit=DEFAULT_FACET_LIMIT,
+         **kwargs,
+     ) -> Dict[Union[str, Tuple[str, ...]], List[Tuple[Any, int]]]:
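+         """
+         Compute facet counts for one or more columns using a MongoDB aggregation pipeline.
+
+         Columns whose values turn out to be arrays are detected from an initial
+         aggregation pass and re-aggregated with ``$unwind`` so that each element
+         is counted separately. Tuple-valued columns are faceted as composite keys.
+         """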
+         if facet_limit is None:
+             facet_limit = DEFAULT_FACET_LIMIT
+         results = {}
+         if not facet_columns:
+             facet_columns = list(self.class_definition().attributes.keys())
+
+         for col in facet_columns:
+             logger.debug(f"Faceting on {col}")
+
+             # Handle tuple columns
+             if isinstance(col, tuple):
+                 group_id = {k.replace(".", "_"): f"${k}" for k in col}
+                 all_fields = col
+             else:
+                 group_id = f"${col}"
+                 all_fields = [col]
+
+             # Initial pipeline without unwinding
+             facet_pipeline = [
+                 {"$match": where} if where else {"$match": {}},
+                 {"$group": {"_id": group_id, "count": {"$sum": 1}}},
+                 {"$sort": {"count": -1}},
+                 {"$limit": facet_limit},
+             ]
+
+             logger.info(f"Initial facet pipeline: {facet_pipeline}")
+             initial_results = list(self.mongo_collection.aggregate(facet_pipeline))
+
+             # Check if we need to unwind based on the results
+             needs_unwinding = False
+             if isinstance(col, tuple):
+                 needs_unwinding = any(
+                     isinstance(result["_id"], dict) and any(isinstance(v, list) for v in result["_id"].values())
+                     for result in initial_results
+                 )
+             else:
+                 needs_unwinding = any(isinstance(result["_id"], list) for result in initial_results)
+
+             if needs_unwinding:
+                 logger.info(f"Detected array values for {col}, unwinding...")
+                 facet_pipeline = [{"$match": where} if where else {"$match": {}}]
+
+                 # Unwind each field if needed
+                 for field in all_fields:
+                     field_parts = field.split(".")
+                     for i in range(len(field_parts)):
+                         facet_pipeline.append({"$unwind": f"${'.'.join(field_parts[:i + 1])}"})
+
+                 facet_pipeline.extend(
+                     [
+                         {"$group": {"_id": group_id, "count": {"$sum": 1}}},
+                         {"$sort": {"count": -1}},
+                         {"$limit": facet_limit},
+                     ]
+                 )
+
+                 logger.info(f"Updated facet pipeline with unwinding: {facet_pipeline}")
+                 facet_results = list(self.mongo_collection.aggregate(facet_pipeline))
+             else:
+                 facet_results = initial_results
+
+             logger.info(f"Facet results: {facet_results}")
+
+             # Process results
+             if isinstance(col, tuple):
+                 results[col] = [
+                     (tuple(result["_id"].values()), result["count"])
+                     for result in facet_results
+                     if result["_id"] is not None and all(v is not None for v in result["_id"].values())
+                 ]
+             else:
+                 results[col] = [
+                     (result["_id"], result["count"]) for result in facet_results if result["_id"] is not None
+                 ]
+
+         return results
+
+     def delete(self, objs: Union[OBJECT, List[OBJECT]], **kwargs) -> int:
+         if not isinstance(objs, list):
+             objs = [objs]
+         filter_conditions = []
+         for obj in objs:
+             filter_condition = {}
+             for key, value in obj.items():
+                 filter_condition[key] = value
+             filter_conditions.append(filter_condition)
+         result = self.mongo_collection.delete_many({"$or": filter_conditions})
+         return result.deleted_count
+
+     def delete_where(self, where: Optional[Dict[str, Any]] = None, missing_ok=True, **kwargs) -> int:
+         logger.info(f"Deleting from {self.target_class_name} where: {where}")
+         if where is None:
+             where = {}
+         result = self.mongo_collection.delete_many(where)
+         deleted_rows_count = result.deleted_count
+         if deleted_rows_count == 0 and not missing_ok:
+             raise ValueError(f"No rows found for {where}")
+         return deleted_rows_count
+
+     def group_by(
+         self,
+         group_by_fields: List[str],
+         inlined_field="objects",
+         agg_map: Optional[Dict[str, List[str]]] = None,
+         where: Optional[Dict] = None,
+         **kwargs,
+     ) -> QueryResult:
+         """
+         Group objects in the collection by specified fields using MongoDB's aggregation pipeline.
+
+         This implementation leverages MongoDB's native aggregation capabilities for efficient grouping.
+
+         :param group_by_fields: List of fields to group by
+         :param inlined_field: Field name to store aggregated objects
+         :param agg_map: Dictionary mapping aggregation types to lists of fields
+         :param where: Filter conditions
+         :param kwargs: Additional arguments
+         :return: Query result containing grouped data
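+
+         Example (a hedged sketch; field names are illustrative)::
+
+             >>> result = collection.group_by(["species"], inlined_field="members")
+             >>> for row in result.rows:
+             ...     print(row["species"], len(row["members"]))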
+         """
+         if isinstance(group_by_fields, str):
+             group_by_fields = [group_by_fields]
+
+         # Build the group key for MongoDB
+         if len(group_by_fields) == 1:
+             # Single field grouping
+             group_id = f"${group_by_fields[0]}"
+         else:
+             # Multi-field grouping
+             group_id = {field: f"${field}" for field in group_by_fields}
+
+         # Start building the pipeline
+         pipeline = []
+
+         # Add match stage if where clause is provided
+         if where:
+             pipeline.append({"$match": where})
+
+         # Add the group stage
+         group_stage = {"$group": {"_id": group_id, "objects": {"$push": "$$ROOT"}}}
+         pipeline.append(group_stage)
+
+         # Execute the aggregation
+         logger.debug(f"MongoDB group_by pipeline: {pipeline}")
+         aggregation_results = list(self.mongo_collection.aggregate(pipeline))
+
+         # Transform the results to match the expected format
+         results = []
+         for result in aggregation_results:
+             # Skip null groups if needed
+             if result["_id"] is None and kwargs.get("skip_nulls", False):
+                 continue
+
+             # Create the group object
+             if isinstance(result["_id"], dict):
+                 # Multi-field grouping
+                 group_obj = result["_id"]
+             else:
+                 # Single field grouping
+                 group_obj = {group_by_fields[0]: result["_id"]}
+
+             # Add the grouped objects
+             objects = result["objects"]
+
+             # Remove MongoDB _id field from each object
+             for obj in objects:
+                 if "_id" in obj:
+                     del obj["_id"]
+
+             # Apply any field selection or transformations based on agg_map
+             if agg_map:
+                 # Get first fields (fields to keep as single values)
+                 first_fields = agg_map.get("first", [])
+                 if first_fields:
+                     # These are already in the group_obj from the _id
+                     pass
+
+                 # Get list fields (fields to aggregate as lists)
+                 list_fields = agg_map.get("list", [])
+                 if list_fields:
+                     # Filter objects to only include specified fields
+                     objects = [{k: obj.get(k) for k in list_fields if k in obj} for obj in objects]
+                 elif not list_fields and first_fields:
+                     # If list_fields is empty but first_fields is specified,
+                     # filter out first_fields from objects to avoid duplication
+                     objects = [{k: v for k, v in obj.items() if k not in first_fields} for obj in objects]
+
+             # Add the objects to the group
+             group_obj[inlined_field] = objects
+             results.append(group_obj)
+
+         return QueryResult(num_rows=len(results), rows=results)
linkml_store/api/stores/mongodb/mongodb_database.py
@@ -0,0 +1,114 @@
+ # mongodb_database.py
+
+ import logging
+ from pathlib import Path
+ from typing import Optional, Union
+ from urllib.parse import urlparse
+
+ from pymongo import MongoClient
+ from pymongo.database import Database as NativeDatabase
+
+ from linkml_store.api import Database
+ from linkml_store.api.queries import Query, QueryResult
+ from linkml_store.api.stores.mongodb.mongodb_collection import MongoDBCollection
+ from linkml_store.utils.file_utils import safe_remove_directory
+ from linkml_store.utils.format_utils import Format
+ from linkml_store.utils.mongodb_utils import import_mongodb
+
+ logger = logging.getLogger(__name__)
+
+
+ class MongoDBDatabase(Database):
+     """
+     An adapter for MongoDB databases.
+
+     The LinkML-Store Database abstraction combines the MongoDB client and database concepts.
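+
+     Example (an illustrative sketch; the handle is hypothetical)::
+
+         >>> db = MongoDBDatabase(handle="mongodb://localhost:27017/test")
+         >>> db.init_collections()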
+     """
+
+     _native_client: MongoClient = None
+     _native_db = None
+     collection_class = MongoDBCollection
+
+     def __init__(self, handle: Optional[str] = None, **kwargs):
+         if handle is None:
+             handle = "mongodb://localhost:27017/test"
+         if handle == "mongodb":
+             handle = "mongodb://localhost:27017/temporary"
+         super().__init__(handle=handle, **kwargs)
+
+     @property
+     def _db_name(self) -> str:
+         if self.handle:
+             parsed_url = urlparse(self.handle)
+             path_parts = parsed_url.path.lstrip("/").split("?")[0].split("/")
+             db_name = path_parts[0] if path_parts else "default"
+             if not db_name:
+                 db_name = self.alias
+         else:
+             db_name = "default"
+         return db_name
+
+     @property
+     def native_client(self) -> MongoClient:
+         if self._native_client is None:
+             self._native_client = MongoClient(self.handle)
+         return self._native_client
+
+     @property
+     def native_db(self) -> NativeDatabase:
+         if self._native_db is None:
+             self._native_db = self.native_client[self._db_name]
+         return self._native_db
+
+     def commit(self, **kwargs):
+         pass
+
+     def close(self, **kwargs):
+         if self._native_client:
+             self._native_client.close()
+
+     def drop(self, **kwargs):
+         self.native_client.drop_database(self.native_db.name)
+
+     def query(self, query: Query, **kwargs) -> QueryResult:
+         if query.from_table:
+             collection = self.get_collection(query.from_table)
+             return collection.query(query, **kwargs)
+         else:
+             raise NotImplementedError(f"Querying without a table is not supported in {self.__class__.__name__}")
+
+     def init_collections(self):
+         if self._collections is None:
+             self._collections = {}
+
+         for collection_name in self.native_db.list_collection_names():
+             if collection_name not in self._collections:
+                 collection = MongoDBCollection(name=collection_name, parent=self)
+                 self._collections[collection_name] = collection
+
+     def export_database(self, location: str, target_format: Optional[Union[str, Format]] = None, **kwargs):
+         if target_format == Format.DUMP_MONGODB.value or target_format == Format.DUMP_MONGODB:
+             path = Path(location)
+             if path.exists():
+                 safe_remove_directory(path, no_backup=True)
+             from linkml_store.utils.mongodb_utils import export_mongodb
+
+             export_mongodb(self.handle, location)
+         else:
+             super().export_database(location, target_format=target_format, **kwargs)
+
+     def import_database(self, location: str, source_format: Optional[str] = None, **kwargs):
+         """
+         Import a database from a file or location.
+
+         :param location: location of the file
+         :param source_format: source format
+         :param kwargs: additional arguments
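+
+         Example (an illustrative path; assumes a prior ``Format.DUMP_MONGODB`` export)::
+
+             >>> db.import_database("/path/to/dump", source_format=Format.DUMP_MONGODB)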
+         """
+         if source_format == Format.DUMP_MONGODB.value or source_format == Format.DUMP_MONGODB:
+             import_mongodb(self.handle, location, drop=True)
+         else:
+             super().import_database(location, source_format=source_format, **kwargs)