kumoai 2.10.0.dev202510061830__cp313-cp313-macosx_11_0_arm64.whl → 2.13.0.dev202511261731__cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kumoai/__init__.py +10 -11
- kumoai/_version.py +1 -1
- kumoai/client/client.py +9 -13
- kumoai/client/endpoints.py +1 -0
- kumoai/client/rfm.py +35 -7
- kumoai/experimental/rfm/__init__.py +153 -10
- kumoai/experimental/rfm/infer/timestamp.py +5 -4
- kumoai/experimental/rfm/local_graph.py +90 -74
- kumoai/experimental/rfm/local_graph_sampler.py +16 -10
- kumoai/experimental/rfm/local_graph_store.py +13 -1
- kumoai/experimental/rfm/local_pquery_driver.py +249 -49
- kumoai/experimental/rfm/local_table.py +100 -22
- kumoai/experimental/rfm/pquery/__init__.py +4 -4
- kumoai/experimental/rfm/pquery/{backend.py → executor.py} +24 -58
- kumoai/experimental/rfm/pquery/{pandas_backend.py → pandas_executor.py} +277 -223
- kumoai/experimental/rfm/rfm.py +174 -91
- kumoai/experimental/rfm/sagemaker.py +130 -0
- kumoai/jobs.py +1 -0
- kumoai/spcs.py +1 -3
- kumoai/trainer/trainer.py +9 -10
- kumoai/utils/progress_logger.py +10 -4
- {kumoai-2.10.0.dev202510061830.dist-info → kumoai-2.13.0.dev202511261731.dist-info}/METADATA +13 -5
- {kumoai-2.10.0.dev202510061830.dist-info → kumoai-2.13.0.dev202511261731.dist-info}/RECORD +26 -25
- {kumoai-2.10.0.dev202510061830.dist-info → kumoai-2.13.0.dev202511261731.dist-info}/WHEEL +0 -0
- {kumoai-2.10.0.dev202510061830.dist-info → kumoai-2.13.0.dev202511261731.dist-info}/licenses/LICENSE +0 -0
- {kumoai-2.10.0.dev202510061830.dist-info → kumoai-2.13.0.dev202511261731.dist-info}/top_level.txt +0 -0
kumoai/__init__.py
CHANGED
@@ -184,15 +184,12 @@ def init(
         snowflake_credentials
     ) if not api_key and snowflake_credentials else None
     client = KumoClient(url=url, api_key=api_key, spcs_token=spcs_token)
-
-
-
-
-
-
-    else:
-        raise ValueError("Client authentication failed. Please check if you "
-                         "have a valid API key.")
+    client.authenticate()
+    global_state._url = client._url
+    global_state._api_key = client._api_key
+    global_state._snowflake_credentials = snowflake_credentials
+    global_state._spcs_token = client._spcs_token
+    global_state._snowpark_session = snowpark_session

     if not api_key and snowflake_credentials:
         # Refresh token every 10 minutes (expires in 1 hour):
@@ -200,9 +197,11 @@ def init(

     logger = logging.getLogger('kumoai')
     log_level = logging.getLevelName(logger.getEffectiveLevel())
+
     logger.info(
-        "Successfully initialized the Kumo SDK
-        "
+        f"Successfully initialized the Kumo SDK (version {__version__}) "
+        f"against deployment {url}, with "
+        f"log level {log_level}.")


 def set_log_level(level: str) -> None:
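From the caller's perspective, the rewritten block means kumoai.init() now authenticates eagerly and surfaces failures as a ValueError. A minimal sketch (URL and API key are placeholders):

    import kumoai

    try:
        kumoai.init(url="https://<deployment>/api", api_key="<api-key>")
    except ValueError:
        # Raised when KumoClient.authenticate() rejects the credentials.
        raise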
kumoai/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = '2.10.0.dev202510061830'
+__version__ = '2.13.0.dev202511261731'
kumoai/client/client.py
CHANGED
@@ -20,7 +20,6 @@ if TYPE_CHECKING:
     )
     from kumoai.client.online import OnlineServingEndpointAPI
     from kumoai.client.pquery import PQueryAPI
-    from kumoai.client.rfm import RFMAPI
     from kumoai.client.source_table import SourceTableAPI
     from kumoai.client.table import TableAPI

@@ -73,12 +72,15 @@ class KumoClient:
             self._session.headers.update(
                 {'Authorization': f'Snowflake Token={self._spcs_token}'})

-    def authenticate(self) ->
-
-
-
-
-
+    def authenticate(self) -> None:
+        """Raises an exception if authentication fails."""
+        try:
+            self._session.get(self._url + '/v1/connectors',
+                              verify=self._verify_ssl).raise_for_status()
+        except Exception:
+            raise ValueError(
+                "Client authentication failed. Please check if you "
+                "have a valid API key/credentials.")

     def set_spcs_token(self, spcs_token: str) -> None:
         r"""Sets the SPCS token for the client and updates the session
@@ -163,12 +165,6 @@ class KumoClient:
         from kumoai.client.online import OnlineServingEndpointAPI
         return OnlineServingEndpointAPI(self)

-    @property
-    def rfm_api(self) -> 'RFMAPI':
-        r"""Returns the typed RFM API."""
-        from kumoai.client.rfm import RFMAPI
-        return RFMAPI(self)
-
     def _request(self, endpoint: Endpoint, **kwargs: Any) -> requests.Response:
         r"""Send a HTTP request to the specified endpoint."""
         endpoint_str = endpoint.get_path()
kumoai/client/endpoints.py
CHANGED
@@ -147,3 +147,4 @@ class RFMEndpoints:
     explain = Endpoint(f"{BASE}/explain", HTTPMethod.POST)
     evaluate = Endpoint(f"{BASE}/evaluate", HTTPMethod.POST)
     validate_query = Endpoint(f"{BASE}/validate_query", HTTPMethod.POST)
+    parse_query = Endpoint(f"{BASE}/parse_query", HTTPMethod.POST)
kumoai/client/rfm.py
CHANGED
@@ -1,7 +1,11 @@
+from typing import Any
+
 from kumoapi.json_serde import to_json_dict
 from kumoapi.rfm import (
     RFMEvaluateResponse,
     RFMExplanationResponse,
+    RFMParseQueryRequest,
+    RFMParseQueryResponse,
     RFMPredictResponse,
     RFMValidateQueryRequest,
     RFMValidateQueryResponse,
@@ -26,25 +30,32 @@ class RFMAPI:
         Returns:
             RFMPredictResponse containing the predictions
         """
-        # Send binary data to the predict endpoint
         response = self._client._request(
-            RFMEndpoints.predict,
-
+            RFMEndpoints.predict,
+            data=request,
+            headers={'Content-Type': 'application/x-protobuf'},
+        )
         raise_on_error(response)
         return parse_response(RFMPredictResponse, response)

-    def explain(
+    def explain(
+        self,
+        request: bytes,
+        skip_summary: bool = False,
+    ) -> RFMExplanationResponse:
         """Explain the RFM model on the given context.

         Args:
             request: The predict request as serialized protobuf.
+            skip_summary: Whether to skip generating a human-readable summary
+                of the explanation.

         Returns:
             RFMPredictResponse containing the explanations
         """
-
+        params: dict[str, Any] = {'generate_summary': not skip_summary}
         response = self._client._request(
-            RFMEndpoints.explain, data=request,
+            RFMEndpoints.explain, data=request, params=params,
             headers={'Content-Type': 'application/x-protobuf'})
         raise_on_error(response)
         return parse_response(RFMExplanationResponse, response)
@@ -58,7 +69,6 @@ class RFMAPI:
         Returns:
             RFMEvaluateResponse containing the computed metrics
         """
-        # Send binary data to the evaluate endpoint
         response = self._client._request(
             RFMEndpoints.evaluate, data=request,
             headers={'Content-Type': 'application/x-protobuf'})
@@ -82,3 +92,21 @@ class RFMAPI:
             json=to_json_dict(request))
         raise_on_error(response)
         return parse_response(RFMValidateQueryResponse, response)
+
+    def parse_query(
+        self,
+        request: RFMParseQueryRequest,
+    ) -> RFMParseQueryResponse:
+        """Validate a predictive query against a graph.
+
+        Args:
+            request: The request object containing
+                the query and graph definition
+
+        Returns:
+            RFMParseQueryResponse containing the QueryDefinition
+        """
+        response = self._client._request(RFMEndpoints.parse_query,
+                                         json=to_json_dict(request))
+        raise_on_error(response)
+        return parse_response(RFMParseQueryResponse, response)
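The two new surface changes above can be exercised roughly as follows. This is a sketch: client is assumed to be an initialized KumoClient, request_bytes stands for the serialized protobuf predict request described in the docstrings, and the RFMParseQueryRequest fields are left elided because this diff does not show them.

    from kumoapi.rfm import RFMParseQueryRequest
    from kumoai.client.rfm import RFMAPI

    api = RFMAPI(client)
    # New endpoint: parse a predictive query against a graph definition.
    parsed = api.parse_query(RFMParseQueryRequest(...))
    # New flag: request an explanation without the natural-language summary.
    explanation = api.explain(request_bytes, skip_summary=True)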
kumoai/experimental/rfm/__init__.py
CHANGED
@@ -12,7 +12,7 @@ CPU architecture: {platform.machine()}
 glibc version: {platform.libc_ver()[1]}

 ✅ Supported Environments:
-* Python versions: 3.
+* Python versions: 3.10, 3.11, 3.12, 3.13
 * Operating systems and CPU architectures:
   * Linux (x86_64)
   * macOS (arm64)
@@ -20,7 +20,7 @@ glibc version: {platform.libc_ver()[1]}
   * glibc versions: >=2.28

 ❌ Unsupported Environments:
-* Python versions: 3.8, 3.14
+* Python versions: 3.8, 3.9, 3.14
 * Operating systems and CPU architectures:
   * Linux (arm64)
   * macOS (x86_64)
@@ -31,14 +31,122 @@ Please create a feature request at 'https://github.com/kumo-ai/kumo-rfm'."""

     raise RuntimeError(_msg) from e

-from
+from dataclasses import dataclass
+from enum import Enum
+import ipaddress
+import logging
+import re
+import socket
+import threading
+from typing import Optional, Dict, Tuple
 import os
+from urllib.parse import urlparse
 import kumoai
+from kumoai.client.client import KumoClient
+from .sagemaker import (KumoClient_SageMakerAdapter,
+                        KumoClient_SageMakerProxy_Local)
 from .local_table import LocalTable
 from .local_graph import LocalGraph
-from .rfm import KumoRFM
+from .rfm import ExplainConfig, Explanation, KumoRFM
 from .authenticate import authenticate

+logger = logging.getLogger('kumoai_rfm')
+
+
+def _is_local_address(host: str | None) -> bool:
+    """Return True if the hostname/IP refers to the local machine."""
+    if not host:
+        return False
+    try:
+        infos = socket.getaddrinfo(host, None)
+        for _, _, _, _, sockaddr in infos:
+            ip = sockaddr[0]
+            ip_obj = ipaddress.ip_address(ip)
+            if ip_obj.is_loopback or ip_obj.is_unspecified:
+                return True
+        return False
+    except Exception:
+        return False
+
+
+class InferenceBackend(str, Enum):
+    REST = "REST"
+    LOCAL_SAGEMAKER = "LOCAL_SAGEMAKER"
+    AWS_SAGEMAKER = "AWS_SAGEMAKER"
+    UNKNOWN = "UNKNOWN"
+
+
+def _detect_backend(
+        url: str) -> Tuple[InferenceBackend, Optional[str], Optional[str]]:
+    parsed = urlparse(url)
+
+    # Remote SageMaker
+    if ("runtime.sagemaker" in parsed.netloc
+            and parsed.path.endswith("/invocations")):
+        # Example: https://runtime.sagemaker.us-west-2.amazonaws.com/
+        # endpoints/Name/invocations
+        match = re.search(r"runtime\.sagemaker\.([a-z0-9-]+)\.amazonaws\.com",
+                          parsed.netloc)
+        region = match.group(1) if match else None
+        m = re.search(r"/endpoints/([^/]+)/invocations", parsed.path)
+        endpoint_name = m.group(1) if m else None
+        return InferenceBackend.AWS_SAGEMAKER, region, endpoint_name
+
+    # Local SageMaker
+    if parsed.port == 8080 and parsed.path.endswith(
+            "/invocations") and _is_local_address(parsed.hostname):
+        return InferenceBackend.LOCAL_SAGEMAKER, None, None
+
+    # Default: regular REST
+    return InferenceBackend.REST, None, None
+
+
+@dataclass
+class RfmGlobalState:
+    _url: str = '__url_not_provided__'
+    _backend: InferenceBackend = InferenceBackend.UNKNOWN
+    _region: Optional[str] = None
+    _endpoint_name: Optional[str] = None
+    _thread_local = threading.local()
+
+    # Thread-safe init-once.
+    _initialized: bool = False
+    _lock: threading.Lock = threading.Lock()
+
+    @property
+    def client(self) -> KumoClient:
+        if self._backend == InferenceBackend.REST:
+            return kumoai.global_state.client
+
+        if hasattr(self._thread_local, '_sagemaker'):
+            # Set the spcs token in the client to ensure it has the latest.
+            return self._thread_local._sagemaker
+
+        sagemaker_client: KumoClient
+        if self._backend == InferenceBackend.LOCAL_SAGEMAKER:
+            sagemaker_client = KumoClient_SageMakerProxy_Local(self._url)
+        else:
+            assert self._backend == InferenceBackend.AWS_SAGEMAKER
+            assert self._region
+            assert self._endpoint_name
+            sagemaker_client = KumoClient_SageMakerAdapter(
+                self._region, self._endpoint_name)
+
+        self._thread_local._sagemaker = sagemaker_client
+        return sagemaker_client
+
+    def reset(self) -> None:  # For testing only.
+        with self._lock:
+            self._initialized = False
+            self._url = '__url_not_provided__'
+            self._backend = InferenceBackend.UNKNOWN
+            self._region = None
+            self._endpoint_name = None
+            self._thread_local = threading.local()
+
+
+global_state = RfmGlobalState()
+

 def init(
     url: Optional[str] = None,
@@ -47,19 +155,54 @@ def init(
     snowflake_application: Optional[str] = None,
     log_level: str = "INFO",
 ) -> None:
-
-
+    with global_state._lock:
+        if global_state._initialized:
+            if url != global_state._url:
+                raise ValueError(
+                    "Kumo RFM has already been initialized with a different "
+                    "URL. Re-initialization with a different URL is not "
+                    "supported.")
+            return
+
+        if url is None:
+            url = os.getenv("RFM_API_URL", "https://kumorfm.ai/api")
+
+        backend, region, endpoint_name = _detect_backend(url)
+        if backend == InferenceBackend.REST:
+            # Initialize kumoai.global_state
+            if (kumoai.global_state.initialized
+                    and kumoai.global_state._url != url):
+                raise ValueError(
+                    "Kumo AI SDK has already been initialized with different "
+                    "API URL. Please restart Python interpreter and "
+                    "initialize via kumoai.rfm.init()")
+            kumoai.init(url=url, api_key=api_key,
+                        snowflake_credentials=snowflake_credentials,
+                        snowflake_application=snowflake_application,
+                        log_level=log_level)
+        elif backend == InferenceBackend.AWS_SAGEMAKER:
+            assert region
+            assert endpoint_name
+            KumoClient_SageMakerAdapter(region, endpoint_name).authenticate()
+        else:
+            assert backend == InferenceBackend.LOCAL_SAGEMAKER
+            KumoClient_SageMakerProxy_Local(url).authenticate()

-
-
-
-
+        global_state._url = url
+        global_state._backend = backend
+        global_state._region = region
+        global_state._endpoint_name = endpoint_name
+        global_state._initialized = True
+        logger.info("Kumo RFM initialized with backend: %s, url: %s", backend,
+                    url)


 __all__ = [
     'LocalTable',
     'LocalGraph',
     'KumoRFM',
+    'ExplainConfig',
+    'Explanation',
     'authenticate',
     'init',
 ]
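In rough terms, the new init flow above picks an inference backend from the shape of the URL. A sketch of the three cases (URLs and the API key are placeholders):

    import kumoai.experimental.rfm as rfm

    # Regular REST deployment (also the default via RFM_API_URL or
    # https://kumorfm.ai/api when no URL is given):
    rfm.init(url="https://kumorfm.ai/api", api_key="<api-key>")

    # A URL such as
    #   https://runtime.sagemaker.us-west-2.amazonaws.com/endpoints/<name>/invocations
    # is detected as AWS_SAGEMAKER (region and endpoint name are parsed from it),
    # while http://localhost:8080/invocations is detected as LOCAL_SAGEMAKER;
    # both paths authenticate the corresponding SageMaker client instead of
    # calling kumoai.init().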
kumoai/experimental/rfm/infer/timestamp.py
CHANGED
@@ -21,9 +21,7 @@ def contains_timestamp(ser: pd.Series, column_name: str, dtype: Dtype) -> bool:
         column_name,
         re.IGNORECASE,
     )
-
-    if match is not None:
-        return True
+    score = 0.3 if match is not None else 0.0

     ser = ser.iloc[:100]
     ser = ser.dropna()
@@ -37,4 +35,7 @@ def contains_timestamp(ser: pd.Series, column_name: str, dtype: Dtype) -> bool:
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UnknownTimezoneWarning)
         warnings.filterwarnings('ignore', message='Could not infer format')
-
+        mask = pd.to_datetime(ser, errors='coerce').notna()
+    score += int(mask.sum()) / len(mask)
+
+    return score >= 1.0
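To illustrate the revised heuristic (a standalone sketch, not code from the package): a timestamp-like column name now contributes 0.3, the fraction of values that pd.to_datetime can parse contributes up to 1.0, and a column is only flagged when the combined score reaches 1.0:

    import pandas as pd

    ser = pd.Series(["2024-01-01", "2024-02-15", "not a date"])
    mask = pd.to_datetime(ser, errors="coerce").notna()
    score = 0.3 + int(mask.sum()) / len(mask)  # assuming the name matched: 0.3 + 2/3
    print(score >= 1.0)  # False -> not treated as a timestamp column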
kumoai/experimental/rfm/local_graph.py
CHANGED
@@ -29,45 +29,46 @@ class LocalGraph:

     .. code-block:: python

-        import
-        graph
-        graph
-        graph.
-        graph
+        >>> # doctest: +SKIP
+        >>> import pandas as pd
+        >>> import kumoai.experimental.rfm as rfm
+
+        >>> # Load data frames into memory:
+        >>> df1 = pd.DataFrame(...)
+        >>> df2 = pd.DataFrame(...)
+        >>> df3 = pd.DataFrame(...)
+
+        >>> # Define tables from data frames:
+        >>> table1 = rfm.LocalTable(name="table1", data=df1)
+        >>> table2 = rfm.LocalTable(name="table2", data=df2)
+        >>> table3 = rfm.LocalTable(name="table3", data=df3)
+
+        >>> # Create a graph from a dictionary of tables:
+        >>> graph = rfm.LocalGraph({
+        ...     "table1": table1,
+        ...     "table2": table2,
+        ...     "table3": table3,
+        ... })
+
+        >>> # Infer table metadata:
+        >>> graph.infer_metadata()
+
+        >>> # Infer links/edges:
+        >>> graph.infer_links()
+
+        >>> # Inspect table metadata:
+        >>> for table in graph.tables.values():
+        ...     table.print_metadata()
+
+        >>> # Visualize graph (if graphviz is installed):
+        >>> graph.visualize()
+
+        >>> # Add/Remove edges between tables:
+        >>> graph.link(src_table="table1", fkey="id1", dst_table="table2")
+        >>> graph.unlink(src_table="table1", fkey="id1", dst_table="table2")
+
+        >>> # Validate graph:
+        >>> graph.validate()
     """

     # Constructors ############################################################
@@ -104,27 +105,28 @@ class LocalGraph:

         .. code-block:: python

-
-            import
+            >>> # doctest: +SKIP
+            >>> import pandas as pd
+            >>> import kumoai.experimental.rfm as rfm

-            # Load data frames into memory:
-            df1 = pd.DataFrame(...)
-            df2 = pd.DataFrame(...)
-            df3 = pd.DataFrame(...)
+            >>> # Load data frames into memory:
+            >>> df1 = pd.DataFrame(...)
+            >>> df2 = pd.DataFrame(...)
+            >>> df3 = pd.DataFrame(...)

-            # Create a graph from a dictionary of data frames:
-            graph = rfm.LocalGraph.from_data({
-
-
-
-            })
+            >>> # Create a graph from a dictionary of data frames:
+            >>> graph = rfm.LocalGraph.from_data({
+            ...     "table1": df1,
+            ...     "table2": df2,
+            ...     "table3": df3,
+            ... })

-            # Inspect table metadata:
-            for table in graph.tables.values():
-
+            >>> # Inspect table metadata:
+            >>> for table in graph.tables.values():
+            ...     table.print_metadata()

-            # Visualize graph (if graphviz is installed):
-            graph.visualize()
+            >>> # Visualize graph (if graphviz is installed):
+            >>> graph.visualize()

         Args:
             df_dict: A dictionary of data frames, where the keys are the names
@@ -141,6 +143,7 @@ class LocalGraph:
             graph.

         Example:
+            >>> # doctest: +SKIP
             >>> import kumoai.experimental.rfm as rfm
             >>> df1 = pd.DataFrame(...)
             >>> df2 = pd.DataFrame(...)
@@ -150,7 +153,7 @@ class LocalGraph:
             ...     "table2": df2,
             ...     "table3": df3,
             ... })
-
+            >>> graph.validate()
         """
         tables = [LocalTable(df, name) for name, df in df_dict.items()]

@@ -231,16 +234,17 @@ class LocalGraph:
         r"""Returns a :class:`pandas.DataFrame` object containing metadata
         information about the tables in this graph.

-        The returned dataframe has columns ``name``, ``primary_key``,
-        ``time_column``, which provide an aggregate
-        the tables of this graph.
+        The returned dataframe has columns ``name``, ``primary_key``,
+        ``time_column``, and ``end_time_column``, which provide an aggregate
+        view of the properties of the tables of this graph.

         Example:
+            >>> # doctest: +SKIP
             >>> import kumoai.experimental.rfm as rfm
             >>> graph = rfm.LocalGraph(tables=...).infer_metadata()
-            >>> graph.metadata
-                name
-            0  users
+            >>> graph.metadata  # doctest: +SKIP
+                name  primary_key time_column end_time_column
+            0  users      user_id           -               -
         """
         tables = list(self.tables.values())

@@ -251,6 +255,11 @@ class LocalGraph:
             pd.Series(dtype=str, data=[t._primary_key or '-' for t in tables]),
             'time_column':
             pd.Series(dtype=str, data=[t._time_column or '-' for t in tables]),
+            'end_time_column':
+            pd.Series(
+                dtype=str,
+                data=[t._end_time_column or '-' for t in tables],
+            ),
         })

     def print_metadata(self) -> None:
@@ -602,8 +611,8 @@ class LocalGraph:
                 raise ValueError(f"{edge} is invalid as foreign key "
                                  f"'{fkey}' and primary key '{dst_key.name}' "
                                  f"have incompatible data types (got "
-                                 f"fkey.dtype '{
-                                 f"pkey.dtype '{
+                                 f"fkey.dtype '{src_key.dtype}' and "
+                                 f"pkey.dtype '{dst_key.dtype}')")

         return self

@@ -676,6 +685,11 @@ class LocalGraph:
             ]
             if time_column := table.time_column:
                 keys += [f'{time_column.name}: Time ({time_column.dtype})']
+            if end_time_column := table.end_time_column:
+                keys += [
+                    f'{end_time_column.name}: '
+                    f'End Time ({end_time_column.dtype})'
+                ]
             key_repr = left_align(keys)

             columns = []
@@ -683,9 +697,9 @@ class LocalGraph:
             columns += [
                 f'{column.name}: {column.stype} ({column.dtype})'
                 for column in table.columns
-                if column.name not in fkeys_dict[table_name]
-                and column.name != table.
-                and column.name != table.
+                if column.name not in fkeys_dict[table_name] and
+                column.name != table._primary_key and column.name != table.
+                _time_column and column.name != table._end_time_column
             ]
             column_repr = left_align(columns)

@@ -752,16 +766,18 @@ class LocalGraph:
     def _to_api_graph_definition(self) -> GraphDefinition:
         tables: Dict[str, TableDefinition] = {}
         col_groups: List[ColumnKeyGroup] = []
-        for
-        tables[
+        for table_name, table in self.tables.items():
+            tables[table_name] = table._to_api_table_definition()
             if table.primary_key is None:
                 continue
-            keys = [ColumnKey(
+            keys = [ColumnKey(table_name, table.primary_key.name)]
             for edge in self.edges:
-                if edge.dst_table ==
+                if edge.dst_table == table_name:
                     keys.append(ColumnKey(edge.src_table, edge.fkey))
-            keys = sorted(
-
+            keys = sorted(
+                list(set(keys)),
+                key=lambda x: f'{x.table_name}.{x.col_name}',
+            )
             if len(keys) > 1:
                 col_groups.append(ColumnKeyGroup(keys))
         return GraphDefinition(tables, col_groups)
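A short sketch of how the new end_time_column support surfaces to users. Whether a given column is actually inferred as the end-time column depends on inference logic outside this diff, so the table and roles below are illustrative only:

    import pandas as pd
    import kumoai.experimental.rfm as rfm

    df = pd.DataFrame({
        "booking_id": [1, 2],
        "check_in": pd.to_datetime(["2024-01-01", "2024-01-05"]),
        "check_out": pd.to_datetime(["2024-01-03", "2024-01-08"]),
    })
    graph = rfm.LocalGraph.from_data({"bookings": df})
    # graph.metadata now reports an 'end_time_column' alongside 'name',
    # 'primary_key', and 'time_column' for every table in the graph.
    print(graph.metadata)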