kumoai 2.13.0.dev202511161731__cp312-cp312-macosx_11_0_arm64.whl → 2.13.0.dev202512011731__cp312-cp312-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kumoai/__init__.py +6 -9
- kumoai/_version.py +1 -1
- kumoai/client/client.py +9 -13
- kumoai/connector/utils.py +23 -2
- kumoai/experimental/rfm/__init__.py +162 -46
- kumoai/experimental/rfm/backend/__init__.py +0 -0
- kumoai/experimental/rfm/backend/local/__init__.py +38 -0
- kumoai/experimental/rfm/backend/local/table.py +151 -0
- kumoai/experimental/rfm/backend/sqlite/__init__.py +23 -0
- kumoai/experimental/rfm/backend/sqlite/table.py +117 -0
- kumoai/experimental/rfm/base/__init__.py +7 -0
- kumoai/experimental/rfm/base/column.py +66 -0
- kumoai/experimental/rfm/{local_table.py → base/table.py} +67 -139
- kumoai/experimental/rfm/{local_graph.py → graph.py} +44 -30
- kumoai/experimental/rfm/local_graph_sampler.py +0 -2
- kumoai/experimental/rfm/local_graph_store.py +12 -11
- kumoai/experimental/rfm/rfm.py +25 -14
- kumoai/experimental/rfm/sagemaker.py +138 -0
- kumoai/spcs.py +1 -3
- kumoai/testing/decorators.py +1 -1
- {kumoai-2.13.0.dev202511161731.dist-info → kumoai-2.13.0.dev202512011731.dist-info}/METADATA +9 -2
- {kumoai-2.13.0.dev202511161731.dist-info → kumoai-2.13.0.dev202512011731.dist-info}/RECORD +25 -17
- {kumoai-2.13.0.dev202511161731.dist-info → kumoai-2.13.0.dev202512011731.dist-info}/WHEEL +0 -0
- {kumoai-2.13.0.dev202511161731.dist-info → kumoai-2.13.0.dev202512011731.dist-info}/licenses/LICENSE +0 -0
- {kumoai-2.13.0.dev202511161731.dist-info → kumoai-2.13.0.dev202512011731.dist-info}/top_level.txt +0 -0
kumoai/__init__.py
CHANGED
@@ -184,15 +184,12 @@ def init(
         snowflake_credentials
     ) if not api_key and snowflake_credentials else None
     client = KumoClient(url=url, api_key=api_key, spcs_token=spcs_token)
-
-
-
-
-
-
-    else:
-        raise ValueError("Client authentication failed. Please check if you "
-                         "have a valid API key.")
+    client.authenticate()
+    global_state._url = client._url
+    global_state._api_key = client._api_key
+    global_state._snowflake_credentials = snowflake_credentials
+    global_state._spcs_token = client._spcs_token
+    global_state._snowpark_session = snowpark_session
 
     if not api_key and snowflake_credentials:
         # Refresh token every 10 minutes (expires in 1 hour):
kumoai/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = '2.13.0.dev202511161731'
+__version__ = '2.13.0.dev202512011731'
kumoai/client/client.py
CHANGED
@@ -20,7 +20,6 @@ if TYPE_CHECKING:
     )
     from kumoai.client.online import OnlineServingEndpointAPI
     from kumoai.client.pquery import PQueryAPI
-    from kumoai.client.rfm import RFMAPI
     from kumoai.client.source_table import SourceTableAPI
     from kumoai.client.table import TableAPI
 
@@ -73,12 +72,15 @@ class KumoClient:
         self._session.headers.update(
             {'Authorization': f'Snowflake Token={self._spcs_token}'})
 
-    def authenticate(self) ->
-
-
-
-
-
+    def authenticate(self) -> None:
+        """Raises an exception if authentication fails."""
+        try:
+            self._session.get(self._url + '/v1/connectors',
+                              verify=self._verify_ssl).raise_for_status()
+        except Exception:
+            raise ValueError(
+                "Client authentication failed. Please check if you "
+                "have a valid API key/credentials.")
 
     def set_spcs_token(self, spcs_token: str) -> None:
         r"""Sets the SPCS token for the client and updates the session
@@ -163,12 +165,6 @@ class KumoClient:
         from kumoai.client.online import OnlineServingEndpointAPI
         return OnlineServingEndpointAPI(self)
 
-    @property
-    def rfm_api(self) -> 'RFMAPI':
-        r"""Returns the typed RFM API."""
-        from kumoai.client.rfm import RFMAPI
-        return RFMAPI(self)
-
     def _request(self, endpoint: Endpoint, **kwargs: Any) -> requests.Response:
         r"""Send a HTTP request to the specified endpoint."""
         endpoint_str = endpoint.get_path()
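With this change, `KumoClient.authenticate()` no longer reports a status; it returns `None` and raises on failure (internally issuing a GET against `/v1/connectors`). A minimal sketch of how calling code adapts, assuming a placeholder API key:

```python
from kumoai.client.client import KumoClient

client = KumoClient(url="https://kumorfm.ai/api", api_key="<api-key>")
try:
    client.authenticate()  # returns None; raises ValueError on bad credentials
except ValueError as err:
    print(f"Authentication failed: {err}")
```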
kumoai/connector/utils.py
CHANGED
@@ -381,8 +381,29 @@ def _handle_duplicate_names(names: List[str]) -> List[str]:
 
 
 def _sanitize_columns(names: List[str]) -> Tuple[List[str], bool]:
-
+    """Normalize column names in a CSV or Parquet file.
+
+    Rules:
+    - Replace any non-alphanumeric character with "_"
+    - Strip leading/trailing underscores
+    - Ensure uniqueness by appending suffixes: _1, _2, ...
+    - Auto-name empty columns as auto_named_<n>
+
+    Returns:
+        (new_column_names, changed)
+    """
+    _SAN_RE = re.compile(r"[^0-9A-Za-z,\t]")
+    # 1) Replace non-alphanumeric sequences with underscore
     new = [_SAN_RE.sub("_", n).strip("_") for n in names]
+
+    # 2) Auto-name any empty column names to match UI behavior
+    unnamed_counter = 0
+    for i, n in enumerate(new):
+        if not n:
+            new[i] = f"auto_named_{unnamed_counter}"
+            unnamed_counter += 1
+
+    # 3) Ensure uniqueness (append suffixes where needed)
     new = _handle_duplicate_names(new)
     return new, new != names
 
@@ -1168,7 +1189,7 @@ def _detect_and_validate_csv(head_bytes: bytes) -> str:
     - Re-serializes those rows and validates with pandas (small nrows) to catch
       malformed inputs.
     - Raises ValueError on empty input or if parsing fails with the chosen
-
+      delimiter.
     """
     if not head_bytes:
        raise ValueError("Could not auto-detect a delimiter: file is empty.")
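The new `_sanitize_columns` docstring spells out the normalization rules. A small illustration of the replace-and-strip step that is visible in the diff; the follow-up steps are only described in comments, since their exact output depends on `_handle_duplicate_names`:

```python
import re

# Same pattern as in the diff above:
_SAN_RE = re.compile(r"[^0-9A-Za-z,\t]")

names = ["user id", "price($)", "", "user id"]
print([_SAN_RE.sub("_", n).strip("_") for n in names])
# ['user_id', 'price', '', 'user_id']
# The helper then auto-names the empty entry (e.g. auto_named_0) and
# de-duplicates repeated names by appending suffixes such as _1, _2, ...
```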
kumoai/experimental/rfm/__init__.py
CHANGED
@@ -1,43 +1,123 @@
-
-
-except Exception as e:
-    import platform
-
-    _msg = f"""RFM is not supported in your environment.
-
-💻 Your Environment:
-  Python version: {platform.python_version()}
-  Operating system: {platform.system()}
-  CPU architecture: {platform.machine()}
-  glibc version: {platform.libc_ver()[1]}
-
-✅ Supported Environments:
-  * Python versions: 3.10, 3.11, 3.12, 3.13
-  * Operating systems and CPU architectures:
-    * Linux (x86_64)
-    * macOS (arm64)
-    * Windows (x86_64)
-  * glibc versions: >=2.28
-
-❌ Unsupported Environments:
-  * Python versions: 3.8, 3.9, 3.14
-  * Operating systems and CPU architectures:
-    * Linux (arm64)
-    * macOS (x86_64)
-    * Windows (arm64)
-  * glibc versions: <2.28
-
-Please create a feature request at 'https://github.com/kumo-ai/kumo-rfm'."""
-
-    raise RuntimeError(_msg) from e
-
-from typing import Optional, Dict
+import ipaddress
+import logging
 import os
+import re
+import socket
+import threading
+from dataclasses import dataclass
+from enum import Enum
+from typing import Dict, Optional, Tuple
+from urllib.parse import urlparse
 
 import kumoai
-from .
-
-from .rfm import ExplainConfig, Explanation, KumoRFM
+from kumoai.client.client import KumoClient
+
 from .authenticate import authenticate
+from .sagemaker import (
+    KumoClient_SageMakerAdapter,
+    KumoClient_SageMakerProxy_Local,
+)
+from .base import Table
+from .backend.local import LocalTable
+from .graph import Graph
+from .rfm import ExplainConfig, Explanation, KumoRFM
+
+logger = logging.getLogger('kumoai_rfm')
+
+
+def _is_local_address(host: str | None) -> bool:
+    """Return True if the hostname/IP refers to the local machine."""
+    if not host:
+        return False
+    try:
+        infos = socket.getaddrinfo(host, None)
+        for _, _, _, _, sockaddr in infos:
+            ip = sockaddr[0]
+            ip_obj = ipaddress.ip_address(ip)
+            if ip_obj.is_loopback or ip_obj.is_unspecified:
+                return True
+        return False
+    except Exception:
+        return False
+
+
+class InferenceBackend(str, Enum):
+    REST = "REST"
+    LOCAL_SAGEMAKER = "LOCAL_SAGEMAKER"
+    AWS_SAGEMAKER = "AWS_SAGEMAKER"
+    UNKNOWN = "UNKNOWN"
+
+
+def _detect_backend(
+        url: str) -> Tuple[InferenceBackend, Optional[str], Optional[str]]:
+    parsed = urlparse(url)
+
+    # Remote SageMaker
+    if ("runtime.sagemaker" in parsed.netloc
+            and parsed.path.endswith("/invocations")):
+        # Example: https://runtime.sagemaker.us-west-2.amazonaws.com/
+        # endpoints/Name/invocations
+        match = re.search(r"runtime\.sagemaker\.([a-z0-9-]+)\.amazonaws\.com",
+                          parsed.netloc)
+        region = match.group(1) if match else None
+        m = re.search(r"/endpoints/([^/]+)/invocations", parsed.path)
+        endpoint_name = m.group(1) if m else None
+        return InferenceBackend.AWS_SAGEMAKER, region, endpoint_name
+
+    # Local SageMaker
+    if parsed.port == 8080 and parsed.path.endswith(
+            "/invocations") and _is_local_address(parsed.hostname):
+        return InferenceBackend.LOCAL_SAGEMAKER, None, None
+
+    # Default: regular REST
+    return InferenceBackend.REST, None, None
+
+
+@dataclass
+class RfmGlobalState:
+    _url: str = '__url_not_provided__'
+    _backend: InferenceBackend = InferenceBackend.UNKNOWN
+    _region: Optional[str] = None
+    _endpoint_name: Optional[str] = None
+    _thread_local = threading.local()
+
+    # Thread-safe init-once.
+    _initialized: bool = False
+    _lock: threading.Lock = threading.Lock()
+
+    @property
+    def client(self) -> KumoClient:
+        if self._backend == InferenceBackend.REST:
+            return kumoai.global_state.client
+
+        if hasattr(self._thread_local, '_sagemaker'):
+            # Set the spcs token in the client to ensure it has the latest.
+            return self._thread_local._sagemaker
+
+        sagemaker_client: KumoClient
+        if self._backend == InferenceBackend.LOCAL_SAGEMAKER:
+            sagemaker_client = KumoClient_SageMakerProxy_Local(self._url)
+        else:
+            assert self._backend == InferenceBackend.AWS_SAGEMAKER
+            assert self._region
+            assert self._endpoint_name
+            sagemaker_client = KumoClient_SageMakerAdapter(
+                self._region, self._endpoint_name)
+
+        self._thread_local._sagemaker = sagemaker_client
+        return sagemaker_client
+
+    def reset(self) -> None:  # For testing only.
+        with self._lock:
+            self._initialized = False
+            self._url = '__url_not_provided__'
+            self._backend = InferenceBackend.UNKNOWN
+            self._region = None
+            self._endpoint_name = None
+            self._thread_local = threading.local()
+
+
+global_state = RfmGlobalState()
 
 
 def init(
@@ -47,21 +127,57 @@ def init(
     snowflake_application: Optional[str] = None,
     log_level: str = "INFO",
 ) -> None:
-
-
+    with global_state._lock:
+        if global_state._initialized:
+            if url != global_state._url:
+                raise ValueError(
+                    "Kumo RFM has already been initialized with a different "
+                    "URL. Re-initialization with a different URL is not "
+                    "supported.")
+            return
 
-
-
-                snowflake_application=snowflake_application,
-                log_level=log_level)
+        if url is None:
+            url = os.getenv("RFM_API_URL", "https://kumorfm.ai/api")
 
+        backend, region, endpoint_name = _detect_backend(url)
+        if backend == InferenceBackend.REST:
+            # Initialize kumoai.global_state
+            if (kumoai.global_state.initialized
+                    and kumoai.global_state._url != url):
+                raise ValueError(
+                    "Kumo AI SDK has already been initialized with different "
+                    "API URL. Please restart Python interpreter and "
+                    "initialize via kumoai.rfm.init()")
+            kumoai.init(url=url, api_key=api_key,
+                        snowflake_credentials=snowflake_credentials,
+                        snowflake_application=snowflake_application,
+                        log_level=log_level)
+        elif backend == InferenceBackend.AWS_SAGEMAKER:
+            assert region
+            assert endpoint_name
+            KumoClient_SageMakerAdapter(region, endpoint_name).authenticate()
+        else:
+            assert backend == InferenceBackend.LOCAL_SAGEMAKER
+            KumoClient_SageMakerProxy_Local(url).authenticate()
+
+        global_state._url = url
+        global_state._backend = backend
+        global_state._region = region
+        global_state._endpoint_name = endpoint_name
+        global_state._initialized = True
+    logger.info("Kumo RFM initialized with backend: %s, url: %s", backend,
+                url)
+
+
+LocalGraph = Graph  # NOTE Backward compatibility - do not use anymore.
 
 __all__ = [
+    'authenticate',
+    'init',
+    'Table',
     'LocalTable',
-    '
+    'Graph',
     'KumoRFM',
     'ExplainConfig',
     'Explanation',
-    'authenticate',
-    'init',
 ]
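A hedged sketch of the URL-based backend detection added above; the endpoint name and region below are made up, and `_detect_backend` is a private helper:

```python
from kumoai.experimental import rfm

# Plain REST endpoint -> REST backend, no region/endpoint; rfm.init() then
# delegates to kumoai.init() for this backend:
print(rfm._detect_backend("https://kumorfm.ai/api"))

# AWS SageMaker runtime URL -> AWS_SAGEMAKER, region 'us-west-2',
# endpoint 'my-endpoint':
print(rfm._detect_backend(
    "https://runtime.sagemaker.us-west-2.amazonaws.com"
    "/endpoints/my-endpoint/invocations"))

# Local SageMaker container on port 8080 -> LOCAL_SAGEMAKER:
print(rfm._detect_backend("http://localhost:8080/invocations"))
```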
kumoai/experimental/rfm/backend/__init__.py
File without changes
kumoai/experimental/rfm/backend/local/__init__.py
ADDED
@@ -0,0 +1,38 @@
+try:
+    import kumoai.kumolib  # noqa: F401
+except Exception as e:
+    import platform
+
+    _msg = f"""RFM is not supported in your environment.
+
+💻 Your Environment:
+  Python version: {platform.python_version()}
+  Operating system: {platform.system()}
+  CPU architecture: {platform.machine()}
+  glibc version: {platform.libc_ver()[1]}
+
+✅ Supported Environments:
+  * Python versions: 3.10, 3.11, 3.12, 3.13
+  * Operating systems and CPU architectures:
+    * Linux (x86_64)
+    * macOS (arm64)
+    * Windows (x86_64)
+  * glibc versions: >=2.28
+
+❌ Unsupported Environments:
+  * Python versions: 3.8, 3.9, 3.14
+  * Operating systems and CPU architectures:
+    * Linux (arm64)
+    * macOS (x86_64)
+    * Windows (arm64)
+  * glibc versions: <2.28
+
+Please create a feature request at 'https://github.com/kumo-ai/kumo-rfm'."""
+
+    raise RuntimeError(_msg) from e
+
+from .table import LocalTable
+
+__all__ = [
+    'LocalTable',
+]
kumoai/experimental/rfm/backend/local/table.py
ADDED
@@ -0,0 +1,151 @@
+from typing import List, Optional
+
+import pandas as pd
+from kumoapi.typing import Dtype, Stype
+from typing_extensions import Self
+
+from kumoai.experimental.rfm import utils
+from kumoai.experimental.rfm.base import Column, Table
+
+
+class LocalTable(Table):
+    r"""A table backed by a :class:`pandas.DataFrame`.
+
+    A :class:`LocalTable` fully specifies the relevant metadata, *i.e.*
+    selected columns, column semantic types, primary keys and time columns.
+    :class:`LocalTable` is used to create a :class:`Graph`.
+
+    .. code-block:: python
+
+        import pandas as pd
+        import kumoai.experimental.rfm as rfm
+
+        # Load data from a CSV file:
+        df = pd.read_csv("data.csv")
+
+        # Create a table from a `pandas.DataFrame` and infer its metadata ...
+        table = rfm.LocalTable(df, name="my_table").infer_metadata()
+
+        # ... or create a table explicitly:
+        table = rfm.LocalTable(
+            df=df,
+            name="my_table",
+            primary_key="id",
+            time_column="time",
+            end_time_column=None,
+        )
+
+        # Verify metadata:
+        table.print_metadata()
+
+        # Change the semantic type of a column:
+        table[column].stype = "text"
+
+    Args:
+        df: The data frame to create this table from.
+        name: The name of this table.
+        primary_key: The name of the primary key of this table, if it exists.
+        time_column: The name of the time column of this table, if it exists.
+        end_time_column: The name of the end time column of this table, if it
+            exists.
+    """
+    def __init__(
+        self,
+        df: pd.DataFrame,
+        name: str,
+        primary_key: Optional[str] = None,
+        time_column: Optional[str] = None,
+        end_time_column: Optional[str] = None,
+    ) -> None:
+
+        if df.empty:
+            raise ValueError("Data frame must have at least one row")
+        if isinstance(df.columns, pd.MultiIndex):
+            raise ValueError("Data frame must not have a multi-index")
+        if not df.columns.is_unique:
+            raise ValueError("Data frame must have unique column names")
+        if any(col == '' for col in df.columns):
+            raise ValueError("Data frame must have non-empty column names")
+
+        self._data = df.copy(deep=False)
+
+        super().__init__(
+            name=name,
+            columns=list(df.columns),
+            primary_key=primary_key,
+            time_column=time_column,
+            end_time_column=end_time_column,
+        )
+
+    def infer_metadata(self, verbose: bool = True) -> Self:
+        r"""Infers metadata, *i.e.*, primary keys and time columns, in the
+        table.
+
+        Args:
+            verbose: Whether to print verbose output.
+        """
+        logs = []
+
+        # Try to detect primary key if not set:
+        if not self.has_primary_key():
+
+            def is_candidate(column: Column) -> bool:
+                if column.stype == Stype.ID:
+                    return True
+                if all(column.stype != Stype.ID for column in self.columns):
+                    if self.name == column.name:
+                        return True
+                    if (self.name.endswith('s')
+                            and self.name[:-1] == column.name):
+                        return True
+                return False
+
+            candidates = [
+                column.name for column in self.columns if is_candidate(column)
+            ]
+
+            if primary_key := utils.detect_primary_key(
+                    table_name=self.name,
+                    df=self._data,
+                    candidates=candidates,
+            ):
+                self.primary_key = primary_key
+                logs.append(f"primary key '{primary_key}'")
+
+        # Try to detect time column if not set:
+        if not self.has_time_column():
+            candidates = [
+                column.name for column in self.columns
+                if column.stype == Stype.timestamp
+                and column.name != self._end_time_column
+            ]
+            if time_column := utils.detect_time_column(self._data, candidates):
+                self.time_column = time_column
+                logs.append(f"time column '{time_column}'")
+
+        if verbose and len(logs) > 0:
+            print(f"Detected {' and '.join(logs)} in table '{self.name}'")
+
+        return self
+
+    def _has_source_column(self, name: str) -> bool:
+        return name in self._data.columns
+
+    def _get_source_dtype(self, name: str) -> Dtype:
+        return utils.to_dtype(self._data[name])
+
+    def _get_source_stype(self, name: str, dtype: Dtype) -> Stype:
+        return utils.infer_stype(self._data[name], name, dtype)
+
+    def _infer_primary_key(self, candidates: List[str]) -> Optional[str]:
+        return utils.detect_primary_key(
+            table_name=self.name,
+            df=self._data,
+            candidates=candidates,
+        )
+
+    def _infer_time_column(self, candidates: List[str]) -> Optional[str]:
+        return utils.detect_time_column(df=self._data, candidates=candidates)
+
+    def _num_rows(self) -> Optional[int]:
+        return len(self._data)
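A minimal usage sketch for the new `LocalTable` backend, assuming made-up column names and data; `infer_metadata()` fills in a missing primary key and time column from ID- and timestamp-typed candidates, as implemented above:

```python
import pandas as pd
import kumoai.experimental.rfm as rfm

df = pd.DataFrame({
    "user_id": [1, 2, 3],
    "signup_time": pd.to_datetime(["2024-01-01", "2024-01-02", "2024-01-03"]),
    "age": [31, 27, 44],
})

# verbose=True prints what was detected, e.g. a primary key and a time column:
table = rfm.LocalTable(df, name="users").infer_metadata()
```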
kumoai/experimental/rfm/backend/sqlite/__init__.py
ADDED
@@ -0,0 +1,23 @@
+from pathlib import Path
+from typing import Any, TypeAlias, Union
+
+try:
+    import adbc_driver_sqlite.dbapi as adbc
+except ImportError:
+    raise ImportError("No module named 'adbc_driver_sqlite'. Please install "
+                      "Kumo SDK with the 'sqlite' extension via "
+                      "`pip install kumoai[sqlite]`.")
+
+Connection: TypeAlias = adbc.AdbcSqliteConnection
+
+
+def connect(uri: Union[str, Path, None] = None, **kwargs: Any) -> Connection:
+    return adbc.connect(uri, **kwargs)
+
+
+from .table import SQLiteTable  # noqa: E402
+
+__all__ = [
+    'Connection',
+    'SQLiteTable',
+]
kumoai/experimental/rfm/backend/sqlite/table.py
ADDED
@@ -0,0 +1,117 @@
+import re
+from typing import Dict, List, Optional, Sequence
+
+import pyarrow as pa
+from kumoapi.typing import Dtype, Stype
+from typing_extensions import Self
+
+from kumoai.experimental.rfm import utils
+from kumoai.experimental.rfm.backend.sqlite import Connection
+from kumoai.experimental.rfm.base import Table
+
+
+class SQLiteTable(Table):
+    r"""A table backed by a :class:`sqlite` database.
+
+    Args:
+        connection: The connection to a :class:`sqlite` database.
+        name: The name of this table.
+        columns: The selected columns of this table.
+        primary_key: The name of the primary key of this table, if it exists.
+        time_column: The name of the time column of this table, if it exists.
+        end_time_column: The name of the end time column of this table, if it
+            exists.
+    """
+    def __init__(
+        self,
+        connection: Connection,
+        name: str,
+        columns: Optional[Sequence[str]] = None,
+        primary_key: Optional[str] = None,
+        time_column: Optional[str] = None,
+        end_time_column: Optional[str] = None,
+    ) -> None:
+
+        self._connection = connection
+        self._dtype_dict: Dict[str, Dtype] = {}
+
+        with connection.cursor() as cursor:
+            cursor.execute(f"PRAGMA table_info({name})")
+            for _, column, dtype, _, _, is_pkey in cursor.fetchall():
+                if bool(is_pkey):
+                    if primary_key is not None and primary_key != column:
+                        raise ValueError(f"Found duplicate primary key "
+                                         f"definition '{primary_key}' and "
+                                         f"'{column}' in table '{name}'")
+                    primary_key = column
+
+                # Determine colun affinity:
+                dtype = dtype.strip().upper()
+                if re.search('INT', dtype):
+                    self._dtype_dict[column] = Dtype.int
+                elif re.search('TEXT|CHAR|CLOB', dtype):
+                    self._dtype_dict[column] = Dtype.string
+                elif re.search('REAL|FLOA|DOUB', dtype):
+                    self._dtype_dict[column] = Dtype.float
+                else:  # NUMERIC affinity.
+                    self._dtype_dict[column] = Dtype.unsupported
+
+            if len(self._dtype_dict) > 0:
+                column_names = ', '.join(self._dtype_dict.keys())
+                cursor.execute(f"SELECT {column_names} FROM {name} "
+                               f"ORDER BY rowid LIMIT 1000")
+                self._sample = cursor.fetch_arrow_table()
+
+        for column_name in list(self._dtype_dict.keys()):
+            if self._dtype_dict[column_name] == Dtype.unsupported:
+                dtype = self._sample[column_name].type
+                if pa.types.is_integer(dtype):
+                    self._dtype_dict[column_name] = Dtype.int
+                elif pa.types.is_floating(dtype):
+                    self._dtype_dict[column_name] = Dtype.float
+                elif pa.types.is_decimal(dtype):
+                    self._dtype_dict[column_name] = Dtype.float
+                elif pa.types.is_string(dtype):
+                    self._dtype_dict[column_name] = Dtype.string
+                else:
+                    del self._dtype_dict[column_name]
+
+        if len(self._dtype_dict) == 0:
+            raise RuntimeError(f"Table '{name}' does not exist or does not "
+                               f"hold any column with a supported data type")
+
+        super().__init__(
+            name=name,
+            columns=columns or list(self._dtype_dict.keys()),
+            primary_key=primary_key,
+            time_column=time_column,
+            end_time_column=end_time_column,
+        )
+
+    def infer_metadata(self, verbose: bool = True) -> Self:
+        r"""Infers metadata, *i.e.*, primary keys and time columns, in the
+        table.
+
+        Args:
+            verbose: Whether to print verbose output.
+        """
+        return self
+
+    def _has_source_column(self, name: str) -> bool:
+        return name in self._dtype_dict
+
+    def _get_source_dtype(self, name: str) -> Dtype:
+        return self._dtype_dict[name]
+
+    def _get_source_stype(self, name: str, dtype: Dtype) -> Stype:
+        ser = self._sample[name].to_pandas()
+        return utils.infer_stype(ser, name, dtype)
+
+    def _infer_primary_key(self, candidates: List[str]) -> Optional[str]:
+        return None  # TODO
+
+    def _infer_time_column(self, candidates: List[str]) -> Optional[str]:
+        return None  # TODO
+
+    def _num_rows(self) -> Optional[int]:
+        return None