linkml-store 0.1.13__py3-none-any.whl → 0.1.14__py3-none-any.whl

This diff shows the content of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in those registries.

Potentially problematic release: this version of linkml-store might be problematic.

@@ -0,0 +1,193 @@
+ import logging
+ import os
+ import re
+ import shutil
+ from pathlib import Path
+ from typing import Dict, List, Optional, Union
+
+ import numpy as np
+ from linkml_runtime.utils.formatutils import underscore
+ from sklearn.preprocessing import LabelEncoder, OneHotEncoder
+ from sklearn.tree import DecisionTreeClassifier, _tree, export_graphviz
+
+ logger = logging.getLogger(__name__)
+
+
+ def tree_to_nested_expression(
+     tree: DecisionTreeClassifier,
+     feature_names: List[str],
+     categorical_features: Optional[List[str]] = None,
+     feature_encoders: Optional[Dict[str, Union[OneHotEncoder, LabelEncoder]]] = None,
+     target_encoder: Optional[LabelEncoder] = None,
+ ) -> str:
+     """
+     Convert a trained scikit-learn DecisionTreeClassifier to a nested Python conditional expression.
+
+     Args:
+         tree (DecisionTreeClassifier): A trained decision tree classifier.
+         feature_names (list): List of feature names (including one-hot encoded feature names).
+         categorical_features (list): List of original categorical feature names.
+         feature_encoders (dict): Dictionary mapping feature names to their respective OneHotEncoders or LabelEncoders.
+         target_encoder (LabelEncoder, optional): LabelEncoder for the target variable if it's categorical.
+
+     Returns:
+         str: A string representing the nested Python conditional expression.
+
+     Example:
+         >>> import numpy as np
+         >>> from sklearn.tree import DecisionTreeClassifier
+         >>> from sklearn.preprocessing import OneHotEncoder, LabelEncoder
+         >>>
+         >>> # Prepare sample data
+         >>> X = np.array([[0, 'A'], [0, 'B'], [1, 'A'], [1, 'B']])
+         >>> y = np.array(['No', 'Yes', 'Yes', 'No'])
+         >>>
+         >>> # Prepare the encoders
+         >>> feature_encoders = {'feature2': OneHotEncoder(sparse_output=False, handle_unknown='ignore')}
+         >>> target_encoder = LabelEncoder()
+         >>>
+         >>> # Encode the categorical feature and target
+         >>> X_encoded = np.column_stack([
+         ...     X[:, 0],
+         ...     feature_encoders['feature2'].fit_transform(X[:, 1].reshape(-1, 1))
+         ... ])
+         >>> y_encoded = target_encoder.fit_transform(y)
+         >>>
+         >>> # Train the decision tree
+         >>> clf = DecisionTreeClassifier(random_state=42)
+         >>> clf.fit(X_encoded, y_encoded)
+         DecisionTreeClassifier(random_state=42)
+         >>>
+         >>> # Convert to nested expression
+         >>> feature_names = ['feature1', 'feature2_A', 'feature2_B']
+         >>> categorical_features = ['feature2']
+         >>> expression = tree_to_nested_expression(clf, feature_names,
+         ...     categorical_features, feature_encoders, target_encoder)
+         >>> print(expression)
+         (("Yes" if ({feature1} <= 0.5000) else "No") if ({feature2} == "A")
+         else ("No" if ({feature1} <= 0.5000) else "Yes"))
+     """
+     tree_ = tree.tree_
+     feature_name = [feature_names[i] if i != _tree.TREE_UNDEFINED else "undefined!" for i in tree_.feature]
+
+     categorical_features = set(categorical_features or [])
+
+     def get_original_feature_name(name):
+         return name.split("_")[0] if "_" in name else name
+
+     def recurse(node):
+         if tree_.feature[node] != _tree.TREE_UNDEFINED:
+             name = feature_name[node]
+             threshold = tree_.threshold[node]
+             original_name = get_original_feature_name(name)
+             original_name_safe = underscore(original_name)
+             name_safe = underscore(name)
+
+             original_name_safe = "{" + original_name_safe + "}"
+             name_safe = "{" + name_safe + "}"
+
+             if original_name in categorical_features:
+                 if feature_encoders is None or original_name not in feature_encoders:
+                     raise ValueError(f"Encoder is required for categorical feature {original_name}")
+
+                 encoder = feature_encoders[original_name]
+                 if isinstance(encoder, OneHotEncoder):
+                     # For one-hot encoded features, we check if the specific category is present
+                     category = name.split("_", 1)[1]  # Get everything after the first underscore
+                     condition = f'{original_name_safe} == "{category}"'
+                 elif isinstance(encoder, LabelEncoder):
+                     category = encoder.inverse_transform([int(threshold)])[0]
+                     condition = f'{original_name_safe} == "{category}"'
+                 else:
+                     raise ValueError(f"Unsupported encoder type for feature {original_name}")
+             else:
+                 if np.isinf(threshold):
+                     condition = "True"
+                 else:
+                     condition = f"{name_safe} <= {threshold:.4f}"
+
+             left_expr = recurse(tree_.children_left[node])
+             right_expr = recurse(tree_.children_right[node])
+
+             return f"({left_expr} if ({condition}) else {right_expr})"
+         else:
+             class_index = np.argmax(tree_.value[node])
+             if target_encoder:
+                 class_label = target_encoder.inverse_transform([class_index])[0]
+                 return f'"{class_label}"'
+             else:
+                 return str(class_index)
+
+     return recurse(0)
+
+
+ def escape_label(s: str) -> str:
+     """Escape special characters in label strings."""
+     s = str(s)
+     return re.sub(r"([<>])", r"\\\1", s)
+
+
+ def visualize_decision_tree(
+     clf: DecisionTreeClassifier,
+     feature_names: List[str],
+     class_names: List[str] = None,
+     output_file: Union[Path, str] = "decision_tree.png",
+ ) -> None:
+     """
+     Generate a visualization of the decision tree and save it as a PNG file.
+
+     :param clf: Trained DecisionTreeClassifier
+     :param feature_names: List of feature names
+     :param class_names: List of class names (optional)
+     :param output_file: The name of the file to save the visualization (default: "decision_tree.png")
+
+     >>> # Create a sample dataset
+     >>> import pandas as pd
+     >>> data = pd.DataFrame({
+     ...     'age': [25, 30, 35, 40, 45],
+     ...     'income': [50000, 60000, 70000, 80000, 90000],
+     ...     'credit_score': [600, 650, 700, 750, 800],
+     ...     'approved': ['No', 'No', 'Yes', 'Yes', 'Yes']
+     ... })
+     >>>
+     >>> # Prepare features and target
+     >>> X = data[['age', 'income', 'credit_score']]
+     >>> y = data['approved']
+     >>>
+     >>> # Encode target variable
+     >>> le = LabelEncoder()
+     >>> y_encoded = le.fit_transform(y)
+     >>>
+     >>> # Train a decision tree
+     >>> clf = DecisionTreeClassifier(random_state=42)
+     >>> _ = clf.fit(X, y_encoded)
+     >>> # Visualize the tree
+     >>> visualize_decision_tree(clf, X.columns.tolist(), le.classes_, "tests/output/test_tree.png")
+     """
+     # Escape special characters in feature names and class names
+     escaped_feature_names = [escape_label(name) for name in feature_names]
+     escaped_class_names = [escape_label(name) for name in (class_names if class_names is not None else [])]
+
+     import graphviz
+
+     dot_data = export_graphviz(
+         clf,
+         out_file=None,
+         feature_names=escaped_feature_names,
+         class_names=escaped_class_names,
+         filled=True,
+         rounded=True,
+         special_characters=True,
+     )
+     # dot_data = escape_label(dot_data)
+     logger.info(f"Dot: {dot_data}")
+     dot_path = shutil.which("dot")
+     if not dot_path:
+         logger.warning("Graphviz 'dot' executable not found in PATH. Skipping visualization.")
+         return
+     os.environ["GRAPHVIZ_DOT"] = dot_path
+
+     graph = graphviz.Source(dot_data)
+     if isinstance(output_file, Path):
+         output_file = str(output_file)
+     graph.render(output_file.rsplit(".", 1)[0], format="png", cleanup=True)
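
The docstring above shows that `tree_to_nested_expression` emits feature references as `{feature}` placeholders rather than literal values. Below is a minimal sketch of how such a template could be applied to one record; the `apply_expression` helper and the use of `str.format` plus `eval` are illustrative assumptions, not part of linkml-store.

```python
# Minimal sketch: filling and evaluating a generated expression template.
# The expression string is the example output from the docstring above;
# apply_expression() and the use of eval() are illustrative assumptions only.
expression = (
    '(("Yes" if ({feature1} <= 0.5000) else "No") if ({feature2} == "A") '
    'else ("No" if ({feature1} <= 0.5000) else "Yes"))'
)


def apply_expression(expr: str, record: dict) -> str:
    # Substitute each {placeholder} with a Python literal for the record's value,
    # then evaluate the resulting conditional expression.
    filled = expr.format(**{key: repr(value) for key, value in record.items()})
    return eval(filled)  # demonstration only; do not use on untrusted input


print(apply_expression(expression, {"feature1": 0, "feature2": "A"}))  # -> Yes
```

Keeping the placeholders in the template means the same expression can be re-applied to many rows, or translated into another rule syntax downstream.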
@@ -0,0 +1,53 @@
+ import numpy as np
+ import pandas as pd
+
+
+ def predictive_power(df, target_col, feature_cols, cv=5):
+     from sklearn.model_selection import cross_val_score
+     from sklearn.preprocessing import LabelEncoder
+     from sklearn.tree import DecisionTreeClassifier
+
+     # Prepare the data
+     X = df[feature_cols].copy()  # Create an explicit copy
+     y = df[target_col].copy()
+
+     # Encode categorical variables
+     for col in X.columns:
+         if X[col].dtype == "object":
+             X[col] = LabelEncoder().fit_transform(X[col].astype(str))
+
+     if y.dtype == "object":
+         y = LabelEncoder().fit_transform(y.astype(str))
+
+     # Adjust cv based on the number of unique values in y
+     n_unique = len(np.unique(y))
+     cv = min(cv, n_unique)
+
+     # Train a decision tree and get cross-validated accuracy
+     clf = DecisionTreeClassifier(random_state=42)
+
+     if cv < 2:
+         # If cv is less than 2, we can't do cross-validation, so we'll just fit and score
+         clf.fit(X, y)
+         return clf.score(X, y)
+     else:
+         scores = cross_val_score(clf, X, y, cv=cv)
+         return scores.mean()
+
+
+ def analyze_predictive_power(df, columns=None, cv=5):
+     if columns is None:
+         columns = df.columns
+     results = pd.DataFrame(index=columns, columns=["predictive_power", "features"])
+
+     for target_col in columns:
+         feature_cols = [col for col in columns if col != target_col]
+         try:
+             power = predictive_power(df, target_col, feature_cols, cv)
+             results.loc[target_col, "predictive_power"] = power
+             results.loc[target_col, "features"] = ", ".join(feature_cols)
+         except Exception as e:
+             print(f"Error processing {target_col}: {str(e)}")
+             results.loc[target_col, "predictive_power"] = np.nan
+
+     return results
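
`predictive_power` fits a decision tree that predicts one column from the others and reports (cross-validated) accuracy; `analyze_predictive_power` repeats this for every column. A small usage sketch follows, assuming these functions ship as the new `linkml_store/utils/stats_utils.py` module listed in the RECORD diff below; the import path and the toy DataFrame are assumptions.

```python
# Usage sketch; the linkml_store.utils.stats_utils import path is an assumption
# based on the new stats_utils.py entry in the RECORD diff below.
import pandas as pd

from linkml_store.utils.stats_utils import analyze_predictive_power

df = pd.DataFrame(
    {
        "species": ["cat", "cat", "dog", "dog", "dog", "cat"],
        "sound": ["meow", "meow", "bark", "bark", "bark", "meow"],
        "size": ["small", "small", "large", "large", "large", "small"],
    }
)

# For each column, a decision tree is trained on the remaining columns and
# scored with cross-validation (cv is capped at the number of distinct values).
results = analyze_predictive_power(df, cv=3)
print(results)
```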
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: linkml-store
- Version: 0.1.13
+ Version: 0.1.14
  Summary: linkml-store
  License: MIT
  Author: Author 1
@@ -24,6 +24,7 @@ Provides-Extra: mongodb
  Provides-Extra: neo4j
  Provides-Extra: pyarrow
  Provides-Extra: renderer
+ Provides-Extra: scipy
  Provides-Extra: tests
  Provides-Extra: validation
  Requires-Dist: black (>=24.0.0) ; extra == "tests"
@@ -51,9 +52,12 @@ Requires-Dist: pyarrow ; extra == "pyarrow"
  Requires-Dist: pydantic (>=2.0.0,<3.0.0)
  Requires-Dist: pymongo ; extra == "mongodb"
  Requires-Dist: pystow (>=0.5.4,<0.6.0)
+ Requires-Dist: scikit-learn ; extra == "scipy"
+ Requires-Dist: scipy ; extra == "scipy"
  Requires-Dist: seaborn ; extra == "analytics"
  Requires-Dist: sqlalchemy
  Requires-Dist: streamlit (>=1.32.2,<2.0.0) ; extra == "app"
+ Requires-Dist: tiktoken ; extra == "llm"
  Requires-Dist: uvicorn ; extra == "fastapi"
  Description-Content-Type: text/markdown

@@ -61,7 +65,7 @@ Description-Content-Type: text/markdown

  An AI-ready data management and integration platform. LinkML-Store
  provides an abstraction layer over multiple different backends
- (including DuckDB, MongoDB, and local filesystems), allowing for
+ (including DuckDB, MongoDB, Neo4j, and local filesystems), allowing for
  common query, index, and storage operations.

  For full documentation, see [https://linkml.io/linkml-store/](https://linkml.io/linkml-store/)
@@ -99,6 +103,23 @@ linkml-store -d duckdb:///db/my.db -c persons validate
  * API
  * Streamlit applications

+ ## The CRUDSI pattern
+
+ Most database APIs implement the **CRUD** pattern: Create, Read, Update, Delete.
+ LinkML-Store adds **Search** and **Inference** to this pattern, making it **CRUDSI**.
+
+ The notion of "Search" and "Inference" is intended to be flexible and extensible,
+ including:
+
+ * Search
+     * Traditional keyword search
+     * Search using LLM Vector embeddings (*without* a dedicated vector database)
+     * Pluggable specialized search, e.g. genomic sequence (not yet implemented)
+ * Inference (encompassing *validation*, *repair*, and inference of missing data)
+     * Classic rule-based inference
+     * Inference using LLM Retrieval Augmented Generation (RAG)
+     * Statistical/ML inference
+
  ## Features

  ### Multiple Adapters
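
To make the CRUDSI pattern introduced above concrete, the sketch below walks through the create/read/search steps using the client API as the project's Python tutorial describes it; the method names (`Client`, `attach_database`, `create_collection`, `insert`, `find`, `attach_indexer`, `search`) are assumptions to verify against the linked documentation, and the inference API added in this release is not shown.

```python
# Hedged sketch of the "CRUDS" part of CRUDSI; method names follow the Python
# tutorial in the linked docs and should be treated as assumptions.
from linkml_store import Client

client = Client()
db = client.attach_database("duckdb", alias="demo")         # attach a DuckDB-backed database
persons = db.create_collection("Person", alias="persons")   # create a collection

# Create
persons.insert(
    [
        {"id": "P1", "name": "Akira", "occupation": "Bricklayer"},
        {"id": "P2", "name": "Jie", "occupation": "Welder"},
    ]
)

# Read
result = persons.find({"occupation": "Welder"})
print(result.rows)

# Search: attach a keyword indexer; LLM embedding indexers are also described in the docs
persons.attach_indexer("simple")
print(persons.search("bricklayer"))
```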
@@ -108,6 +129,8 @@ LinkML-Store is designed to work with multiple backends, giving a common abstrac
  * [MongoDB](https://linkml.io/linkml-store/how-to/Use-MongoDB.html)
  * [DuckDB](https://linkml.io/linkml-store/tutorials/Python-Tutorial.html)
  * [Solr](https://linkml.io/linkml-store/how-to/Query-Solr-using-CLI.html)
+ * [Neo4j](https://linkml.io/linkml-store/how-to/Use-Neo4j.html)
+
  * Filesystem

  Coming soon: any RDBMS, any triplestore, Neo4J, HDF5-based stores, ChromaDB/Vector dbs ...
@@ -1,9 +1,9 @@
  linkml_store/__init__.py,sha256=jlU6WOUAn8cKIhzbTULmBTWpW9gZdEt7q_RI6KZN1bY,118
  linkml_store/api/__init__.py,sha256=3CelcFEFz0y3MkQAzhQ9JxHIt1zFk6nYZxSmYTo8YZE,226
- linkml_store/api/client.py,sha256=aiWhLSAY0Wj7EfoXdEXMtGId1LSbvGya8dGEKdOB0iI,10989
- linkml_store/api/collection.py,sha256=Qi89f-iUSC1zKUgqSMuooklteO0wbJfYbeVkpn8CbIM,36186
- linkml_store/api/config.py,sha256=E-ma5hXQrs5Gyet2BB6CFJyPqNjXwAr1pr9H7AT2vOc,5159
- linkml_store/api/database.py,sha256=W5PzpIua8w6eCUqugGqOV6Y7O15g5FSZzzaQKDOfqak,28297
+ linkml_store/api/client.py,sha256=3klBXenQVbLjNQF3WmYfjASt3zvKOfWaCNp5aJM81Ec,12034
+ linkml_store/api/collection.py,sha256=7JndC6A9r3OVbR9aB6d_bdaYN53XU4FpppUterygOaE,37800
+ linkml_store/api/config.py,sha256=71pxQ5jM-ETxJWU7CzmKjsH6IEJUMP5sml381u9TYVk,5654
+ linkml_store/api/database.py,sha256=QVvUuLQPCxB4cvsS7rXqPSfoHkhcMzP9vUcsjkuEYds,29051
  linkml_store/api/queries.py,sha256=w0qnNeCH6pC9WTGoEQYd300MF6o0G3atz2YxN3WecAs,2028
  linkml_store/api/stores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  linkml_store/api/stores/chromadb/__init__.py,sha256=e9BkOPuPnVQKA5PRKDulag59yGNHDP3U2_DnPSrFAKM,132
@@ -20,7 +20,7 @@ linkml_store/api/stores/hdf5/__init__.py,sha256=l4cIh3v7P0nPbwGIsfuCMD_serQ8q8c7
  linkml_store/api/stores/hdf5/hdf5_collection.py,sha256=mnpLMYehn3PuaIjp2dXrIWu8jh-bdQ84X2Ku83jMdEY,3805
  linkml_store/api/stores/hdf5/hdf5_database.py,sha256=EZbjrpaqiNDEFvoD5dZNcGBXA8z6HRNL81emueTZWNw,2714
  linkml_store/api/stores/mongodb/__init__.py,sha256=OSFCr7RQlDEe-O-Y0P_i912oAMK-L3pC7Cnj7sxlwAk,510
- linkml_store/api/stores/mongodb/mongodb_collection.py,sha256=W-j1tkk8h1-zzjIRe-IYAnAOwlFSwr4fnDtgiu7RsVI,6695
+ linkml_store/api/stores/mongodb/mongodb_collection.py,sha256=unN0v7RYlGIiJxEhbNRxZ86TVQ4ELlAsNWTwEbg7E_g,6831
  linkml_store/api/stores/mongodb/mongodb_database.py,sha256=Y9MIV0KSRGCyopz8vGEivhSuvF0vZLCDJd29cdqMIX8,3857
  linkml_store/api/stores/neo4j/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  linkml_store/api/stores/neo4j/neo4j_collection.py,sha256=a-Az5_ypdBMgeNyhrTW7q-ik-vYPCDDONIK7N_CDA9c,17449
@@ -30,7 +30,7 @@ linkml_store/api/stores/solr/solr_collection.py,sha256=ZlxC3JbVaHfSA4HuTeJTsp6qe
  linkml_store/api/stores/solr/solr_database.py,sha256=TFjqbY7jAkdrhAchbNg0E-mChSP7ogNwFExslbvX7Yo,2877
  linkml_store/api/stores/solr/solr_utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  linkml_store/api/types.py,sha256=3aIQtDFMvsSmjuN5qrR2vNK5sHa6yzD_rEOPA6tHwvg,176
- linkml_store/cli.py,sha256=_oDbqElZIWKHJ6ruJyu57rbg2IYUr7U-0AqDlKz6jug,21126
+ linkml_store/cli.py,sha256=6JN5kqAIQeatKQndrQlHxMj-7hClRuaDNnR_fIx-9l4,26529
  linkml_store/constants.py,sha256=x4ZmDsfE9rZcL5WpA93uTKrRWzCD6GodYXviVzIvR38,112
  linkml_store/graphs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  linkml_store/graphs/graph_map.py,sha256=bYRxv8n1YPnFqE9d6JKNmRawb8EAhsPlHhBue0gvtZE,712
@@ -39,19 +39,30 @@ linkml_store/index/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
  linkml_store/index/implementations/llm_indexer.py,sha256=LI5f8SLF_rJY5W6wZPLaUqpyoq-VDW_KqlCBNDNm_po,4827
  linkml_store/index/implementations/simple_indexer.py,sha256=KnkFJtXTHnwjhD_D6ZK2rFhBID1dgCedcOVPEWAY2NU,1282
  linkml_store/index/indexer.py,sha256=K-TDPzdTyGFo6iG4XI_A_3IpwDbKeiTIbdr85NIL5r8,4918
+ linkml_store/inference/__init__.py,sha256=b8NAFNZjOYU_8gOvxdyCyoiHOOl5Ai2ckKs1tv7ZkkY,342
+ linkml_store/inference/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ linkml_store/inference/implementations/rag_inference_engine.py,sha256=TG6nPy2QowycZRPKGwzBF80Wq6Qb7wyrA1gG_h47gYw,5812
+ linkml_store/inference/implementations/rule_based_inference_engine.py,sha256=bncdfRdqjj37DIUQLCzxTJNMwEHPWsWXiom4iN6m_q8,5774
+ linkml_store/inference/implementations/sklearn_inference_engine.py,sha256=UsvtwS76lSyf3_7O1cLNxoBqkPIRwCqJcdiOFKy7lw4,12672
+ linkml_store/inference/inference_config.py,sha256=xgl3VmueErLIOnQQn4HdC2STJNY6yKoPasWmym4ltHQ,2014
+ linkml_store/inference/inference_engine.py,sha256=-X8UFZwEYfavM-cfkEb-77JptWmNQUHBnxAuKdjUi7o,5513
+ linkml_store/inference/inference_engine_registry.py,sha256=6o66gvBYBwdeAKm62zqqvfaBlcopVP_cla3L6uXGsHA,3015
  linkml_store/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  linkml_store/utils/change_utils.py,sha256=O2rvSvgTKB60reLLz9mX5OWykAA_m93bwnUh5ZWa0EY,471
  linkml_store/utils/file_utils.py,sha256=rQ7-XpmI6_Kx_dhEnI98muFRr0MmgI_kZ_9cgJBf_0I,1411
- linkml_store/utils/format_utils.py,sha256=aBS0uUhvdgPEXzw72iGfSezmHZFnqHtPuFUbVR4cfbA,13174
+ linkml_store/utils/format_utils.py,sha256=JA7E-XYgkIGTXBSyTKSRrN7ZHeXOJ8-wpSjRTCAAAso,10680
  linkml_store/utils/io.py,sha256=JHUrWDtlZC2jtN_PQZ4ypdGIyYlftZEN3JaCvEPs44w,884
+ linkml_store/utils/llm_utils.py,sha256=ItszPvHuQ6joU6vf30jGYfut5q9vrma3eYHighr1wOs,2840
  linkml_store/utils/mongodb_utils.py,sha256=Rl1YmMKs1IXwSsJIViSDChbi0Oer5cBnMmjka2TeQS8,4665
  linkml_store/utils/neo4j_utils.py,sha256=y3KPmDZ8mQmePgg0lUeKkeKqzEr2rV226xxEtHc5pRg,1266
- linkml_store/utils/object_utils.py,sha256=is6T2gruvVKvWD5ZntcAl6Qi3L154FObEho_b_crTuE,2539
- linkml_store/utils/pandas_utils.py,sha256=INL8aZ5v2OeLg-Uzfa8P1cpnMMKA1DumiTB0q175tw8,1389
+ linkml_store/utils/object_utils.py,sha256=dAF9WmtWwAXdAnTN-UwbiCbBJt3Wdcsp8Z4lz8lVscY,2642
+ linkml_store/utils/pandas_utils.py,sha256=djiFPO3YbgRVo2XAZuKCtgH8QVLuUyPIsfS8e-0umsU,3182
  linkml_store/utils/patch_utils.py,sha256=q-h_v68okyruzdPTEHCe0WubbQHKpi1qy5bJ9vFWDo8,4823
  linkml_store/utils/query_utils.py,sha256=HWt46BsGWoIGiNBTtvpXGY6onPRWsQky6eu_9cYqbvo,3440
  linkml_store/utils/schema_utils.py,sha256=iJiZxo5NGr7v87h4DV6V9DrDOZHSswMRuf0N4V2rVtg,646
+ linkml_store/utils/sklearn_utils.py,sha256=itPpcrsbbyOazdjmivaaZ1lyZeytm0a0hJ2AS8ziUgg,7590
  linkml_store/utils/sql_utils.py,sha256=T41w_vsc3SauTJQkDMwid_nOtKW1YOKyUuaxEf470hk,5938
+ linkml_store/utils/stats_utils.py,sha256=4KqBb1bqDgAmq-1fJLLu5B2paPgoZZc3A-gnyVam4bI,1799
  linkml_store/webapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  linkml_store/webapi/html/__init__.py,sha256=hwp5eeBJKH65Bvv1x9Z4vsT1tLSYtb9Dq4I9r1kL1q0,69
  linkml_store/webapi/html/base.html.j2,sha256=hoiV2uaSxxrQp7VuAZBOHueH7czyJMYcPBRN6dZFYhk,693
@@ -60,8 +71,8 @@ linkml_store/webapi/html/database_details.html.j2,sha256=qtXdavbZb0mohiObI9dvJtk
  linkml_store/webapi/html/databases.html.j2,sha256=a9BCWQYfPeFhdUd31CWhB0yWhTIFXQayO08JgjyqKoc,294
  linkml_store/webapi/html/generic.html.j2,sha256=KtLaO2HUEF2Opq-OwHKgRKetNWe8IWc6JuIkxRPsywk,1018
  linkml_store/webapi/main.py,sha256=B0Da575kKR7X88N9ykm99Dem8FyBAW9f-w3A_JwUzfw,29165
- linkml_store-0.1.13.dist-info/LICENSE,sha256=77mDOslUnalYnuq9xQYZKtIoNEzcH9mIjvWHOKjamnE,1086
- linkml_store-0.1.13.dist-info/METADATA,sha256=ouYb20TN5M2D12IUx9qspIIOiI-6qmrPpe1b6xx9xhU,5609
- linkml_store-0.1.13.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- linkml_store-0.1.13.dist-info/entry_points.txt,sha256=gWxVsHqx-t-UKWFHFzawQTvs4is4vC1rCF5AeKyqWWk,101
- linkml_store-0.1.13.dist-info/RECORD,,
+ linkml_store-0.1.14.dist-info/LICENSE,sha256=77mDOslUnalYnuq9xQYZKtIoNEzcH9mIjvWHOKjamnE,1086
+ linkml_store-0.1.14.dist-info/METADATA,sha256=T5BZ87qj1w2xyCkQErRa4ErFwvDKNMfahOGWYRkPQmg,6525
+ linkml_store-0.1.14.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ linkml_store-0.1.14.dist-info/entry_points.txt,sha256=gWxVsHqx-t-UKWFHFzawQTvs4is4vC1rCF5AeKyqWWk,101
+ linkml_store-0.1.14.dist-info/RECORD,,