kumoai 2.7.0.dev202508201830__cp312-cp312-win_amd64.whl → 2.12.0.dev202511111731__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kumoai/__init__.py +4 -2
- kumoai/_version.py +1 -1
- kumoai/client/client.py +10 -5
- kumoai/client/endpoints.py +1 -0
- kumoai/client/rfm.py +37 -8
- kumoai/connector/file_upload_connector.py +94 -85
- kumoai/connector/snowflake_connector.py +9 -0
- kumoai/connector/utils.py +1377 -209
- kumoai/experimental/rfm/__init__.py +5 -3
- kumoai/experimental/rfm/authenticate.py +8 -5
- kumoai/experimental/rfm/infer/timestamp.py +7 -4
- kumoai/experimental/rfm/local_graph.py +96 -82
- kumoai/experimental/rfm/local_graph_sampler.py +16 -8
- kumoai/experimental/rfm/local_graph_store.py +32 -10
- kumoai/experimental/rfm/local_pquery_driver.py +342 -46
- kumoai/experimental/rfm/local_table.py +142 -45
- kumoai/experimental/rfm/pquery/__init__.py +4 -4
- kumoai/experimental/rfm/pquery/{backend.py → executor.py} +28 -58
- kumoai/experimental/rfm/pquery/pandas_executor.py +532 -0
- kumoai/experimental/rfm/rfm.py +535 -125
- kumoai/experimental/rfm/utils.py +0 -3
- kumoai/jobs.py +27 -1
- kumoai/kumolib.cp312-win_amd64.pyd +0 -0
- kumoai/pquery/prediction_table.py +5 -3
- kumoai/pquery/training_table.py +5 -3
- kumoai/trainer/job.py +9 -30
- kumoai/trainer/trainer.py +19 -10
- kumoai/utils/__init__.py +2 -1
- kumoai/utils/progress_logger.py +96 -16
- {kumoai-2.7.0.dev202508201830.dist-info → kumoai-2.12.0.dev202511111731.dist-info}/METADATA +4 -5
- {kumoai-2.7.0.dev202508201830.dist-info → kumoai-2.12.0.dev202511111731.dist-info}/RECORD +34 -34
- kumoai/experimental/rfm/pquery/pandas_backend.py +0 -437
- {kumoai-2.7.0.dev202508201830.dist-info → kumoai-2.12.0.dev202511111731.dist-info}/WHEEL +0 -0
- {kumoai-2.7.0.dev202508201830.dist-info → kumoai-2.12.0.dev202511111731.dist-info}/licenses/LICENSE +0 -0
- {kumoai-2.7.0.dev202508201830.dist-info → kumoai-2.12.0.dev202511111731.dist-info}/top_level.txt +0 -0
kumoai/__init__.py
CHANGED
@@ -200,9 +200,11 @@ def init(
 
     logger = logging.getLogger('kumoai')
     log_level = logging.getLevelName(logger.getEffectiveLevel())
+
    logger.info(
-        "Successfully initialized the Kumo SDK
-        "
+        f"Successfully initialized the Kumo SDK (version {__version__}) "
+        f"against deployment {url}, with "
+        f"log level {log_level}.")
 
 
 def set_log_level(level: str) -> None:
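The rewritten message interpolates the SDK version, deployment URL, and effective log level. A minimal sketch of what this produces at `init` time; the `api_key` argument and URL are placeholders for illustration, as only `url` appears in the diff above:

.. code-block:: python

    import kumoai

    kumoai.init(url="https://kumo.example.com/api", api_key="<api-key>")

    # Logs (exact formatting depends on the logging config):
    #   Successfully initialized the Kumo SDK
    #   (version 2.12.0.dev202511111731) against deployment
    #   https://kumo.example.com/api, with log level INFO.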
kumoai/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = '2.7.0.dev202508201830'
+__version__ = '2.12.0.dev202511111731'
kumoai/client/client.py
CHANGED
@@ -33,6 +33,7 @@ class KumoClient:
         url: str,
         api_key: Optional[str],
         spcs_token: Optional[str] = None,
+        verify_ssl: bool = True,
     ) -> None:
         r"""Creates a client against the Kumo public API, provided a URL of
         the endpoint and an authentication token.
@@ -42,11 +43,14 @@ class KumoClient:
             api_key: the public API authentication token.
             spcs_token: the SPCS token used for authentication to access the
                 Kumo API endpoint.
+            verify_ssl: whether to verify SSL certificates. Set to False to
+                skip SSL certificate verification (equivalent to curl -k).
         """
         self._url = url
         self._api_url = f"{url}/{API_VERSION}"
         self._api_key = api_key
         self._spcs_token = spcs_token
+        self._verify_ssl = verify_ssl
 
         retry_strategy = Retry(
             total=10,  # Maximum number of retries
@@ -73,7 +77,8 @@ class KumoClient:
         r"""Raises an exception if authentication fails. Succeeds if the
         client is properly formed.
         """
-        return self._session.get(f"{self._url}/v1/connectors").ok
+        return self._session.get(f"{self._url}/v1/connectors",
+                                 verify=self._verify_ssl).ok
 
     def set_spcs_token(self, spcs_token: str) -> None:
         r"""Sets the SPCS token for the client and updates the session
@@ -184,7 +189,7 @@ class KumoClient:
         :meth:`requests.Session.get`.
         """
         url = self._format_endpoint_url(endpoint)
-        return self._session.get(url=url, **kwargs)
+        return self._session.get(url=url, verify=self._verify_ssl, **kwargs)
 
     def _post(self, endpoint: str, **kwargs: Any) -> requests.Response:
         r"""Send a POST request to the specified endpoint, with keyword
@@ -192,7 +197,7 @@ class KumoClient:
         :meth:`requests.Session.post`.
         """
         url = self._format_endpoint_url(endpoint)
-        return self._session.post(url=url, **kwargs)
+        return self._session.post(url=url, verify=self._verify_ssl, **kwargs)
 
     def _patch(self, endpoint: str, **kwargs: Any) -> requests.Response:
         r"""Send a PATCH request to the specified endpoint, with keyword
@@ -200,7 +205,7 @@ class KumoClient:
         :meth:`requests.Session.patch`.
         """
         url = self._format_endpoint_url(endpoint)
-        return self._session.patch(url=url, **kwargs)
+        return self._session.patch(url=url, verify=self._verify_ssl, **kwargs)
 
     def _delete(self, endpoint: str, **kwargs: Any) -> requests.Response:
         r"""Send a DELETE request to the specified endpoint, with keyword
@@ -208,7 +213,7 @@ class KumoClient:
         :meth:`requests.Session.delete`.
         """
         url = self._format_endpoint_url(endpoint)
-        return self._session.delete(url=url, **kwargs)
+        return self._session.delete(url=url, verify=self._verify_ssl, **kwargs)
 
     def _format_endpoint_url(self, endpoint: str) -> str:
         if endpoint[0] == "/":
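With this change, every request made through the session (GET, POST, PATCH, DELETE, and the authentication probe against `/v1/connectors`) passes `verify=self._verify_ssl` down to `requests`, so a single constructor flag controls certificate verification everywhere. A minimal sketch of opting out, e.g. for a deployment behind a self-signed certificate; the URL and key are placeholders:

.. code-block:: python

    from kumoai.client.client import KumoClient

    client = KumoClient(
        url="https://kumo.example.com/api",
        api_key="<api-key>",
        verify_ssl=False,  # equivalent to `curl -k`; defaults to True
    )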
kumoai/client/endpoints.py
CHANGED
@@ -147,3 +147,4 @@ class RFMEndpoints:
     explain = Endpoint(f"{BASE}/explain", HTTPMethod.POST)
     evaluate = Endpoint(f"{BASE}/evaluate", HTTPMethod.POST)
     validate_query = Endpoint(f"{BASE}/validate_query", HTTPMethod.POST)
+    parse_query = Endpoint(f"{BASE}/parse_query", HTTPMethod.POST)
kumoai/client/rfm.py
CHANGED
@@ -1,6 +1,11 @@
+from typing import Any
+
 from kumoapi.json_serde import to_json_dict
 from kumoapi.rfm import (
     RFMEvaluateResponse,
+    RFMExplanationResponse,
+    RFMParseQueryRequest,
+    RFMParseQueryResponse,
     RFMPredictResponse,
     RFMValidateQueryRequest,
     RFMValidateQueryResponse,
@@ -25,28 +30,35 @@ class RFMAPI:
         Returns:
             RFMPredictResponse containing the predictions
         """
-        # Send binary data to the predict endpoint
         response = self._client._request(
-            RFMEndpoints.predict, data=request,
-            headers={'Content-Type': 'application/x-protobuf'})
+            RFMEndpoints.predict,
+            data=request,
+            headers={'Content-Type': 'application/x-protobuf'},
+        )
         raise_on_error(response)
         return parse_response(RFMPredictResponse, response)
 
-    def explain(self, request: bytes) -> RFMPredictResponse:
+    def explain(
+        self,
+        request: bytes,
+        skip_summary: bool = False,
+    ) -> RFMExplanationResponse:
         """Explain the RFM model on the given context.
 
         Args:
             request: The predict request as serialized protobuf.
+            skip_summary: Whether to skip generating a human-readable summary
+                of the explanation.
 
         Returns:
             RFMPredictResponse containing the explanations
         """
-
+        params: dict[str, Any] = {'generate_summary': not skip_summary}
         response = self._client._request(
-            RFMEndpoints.explain, data=request,
+            RFMEndpoints.explain, data=request, params=params,
             headers={'Content-Type': 'application/x-protobuf'})
         raise_on_error(response)
-        return parse_response(RFMPredictResponse, response)
+        return parse_response(RFMExplanationResponse, response)
 
     def evaluate(self, request: bytes) -> RFMEvaluateResponse:
         """Evaluate the RFM model on the given context.
@@ -57,7 +69,6 @@ class RFMAPI:
         Returns:
             RFMEvaluateResponse containing the computed metrics
         """
-        # Send binary data to the evaluate endpoint
         response = self._client._request(
             RFMEndpoints.evaluate, data=request,
             headers={'Content-Type': 'application/x-protobuf'})
@@ -81,3 +92,21 @@ class RFMAPI:
             json=to_json_dict(request))
         raise_on_error(response)
         return parse_response(RFMValidateQueryResponse, response)
+
+    def parse_query(
+        self,
+        request: RFMParseQueryRequest,
+    ) -> RFMParseQueryResponse:
+        """Validate a predictive query against a graph.
+
+        Args:
+            request: The request object containing
+                the query and graph definition
+
+        Returns:
+            RFMParseQueryResponse containing the QueryDefinition
+        """
+        response = self._client._request(RFMEndpoints.parse_query,
+                                         json=to_json_dict(request))
+        raise_on_error(response)
+        return parse_response(RFMParseQueryResponse, response)
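Two API changes are visible here: `explain()` gains a `skip_summary` flag (inverted into a `generate_summary` query parameter) and now returns a dedicated `RFMExplanationResponse`, while the new `parse_query()` mirrors `validate_query()` but returns the parsed query definition. A sketch of both calls, under stated assumptions; the construction of the `RFMAPI` instance and the requests is not shown in this diff:

.. code-block:: python

    # `api` is an already-constructed RFMAPI and `request_bytes` a
    # serialized protobuf predict request (both assumed, not shown here).
    explanation = api.explain(request_bytes, skip_summary=True)
    # -> POST .../explain?generate_summary=False with a protobuf body

    # `parse_request` is an assumed RFMParseQueryRequest carrying the
    # predictive query and graph definition.
    parsed = api.parse_query(parse_request)  # -> RFMParseQueryResponse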
kumoai/connector/file_upload_connector.py
CHANGED

@@ -1,4 +1,3 @@
-import os
 from typing import List
 
 from kumoapi.source_table import (
@@ -12,14 +11,7 @@ from typing_extensions import override
 
 from kumoai import global_state
 from kumoai.connector.base import Connector
-from kumoai.connector.utils import (
-    MAX_PARTITION_SIZE,
-    MIN_PARTITION_SIZE,
-    _upload_partitioned_csv,
-    _upload_partitioned_parquet,
-    _upload_single_file,
-    logger,
-)
+from kumoai.connector.utils import delete_uploaded_table, upload_table
 
 
 class FileUploadConnector(Connector):
@@ -59,7 +51,6 @@ class FileUploadConnector(Connector):
     def name(self) -> str:
         return f'{self._file_type}_upload_connector'
 
-    @override
     @property
     def source_type(self) -> DataSourceType:
         return DataSourceType.S3
@@ -89,92 +80,110 @@ class FileUploadConnector(Connector):
         auto_partition: bool = True,
         partition_size_mb: int = 250,
     ) -> None:
-        r"""
-
-
-
-        this
-
-
-
-
-
+        r"""Upload a table to Kumo from a local or remote path.
+
+        Supports ``s3://``, ``gs://``, ``abfs://``, ``abfss://``, and ``az://``
+
+        Tables uploaded this way can be accessed from this
+        ``FileUploadConnector`` using the provided name, e.g.,
+        ``connector_obj["my_table"]``.
+
+        Local files
+        -----------
+        - Accepts one ``.parquet`` or ``.csv`` file (must match this
+          connector’s ``file_type``).
+        - If the file is > 1 GiB and ``auto_partition=True``, it is split
+          into ~``partition_size_mb`` MiB parts and uploaded under a common
+          prefix so the connector can read them as one table.
+
+        Remote paths
+        ------------
+        - **Single file** (``.parquet``/``.csv``): validated and uploaded via
+          multipart PUT. Files > 1 GiB are rejected — re-shard to ~200 MiB
+          and upload the directory instead.
+        - **Directory**: must contain only one format (all Parquet or all CSV)
+          matching this connector’s ``file_type``. Files are validated
+          (consistent schema; CSV headers sanitized) and uploaded in parallel
+          with memory-safe budgeting.
 
         .. warning::
-
-
-
+            For local uploads, input must be a single CSV or Parquet file
+            (matching the connector type). For remote uploads, mixed
+            CSV/Parquet directories are not supported. Remote single files
+            larger than 1 GiB are not supported.
 
+        Examples:
+        ---------
         .. code-block:: python
 
             import kumoai
-
+            conn = kumoai.FileUploadConnector(file_type="parquet")
+
+            # Local: small file
+            conn.upload(name="users", path="/data/users.parquet")
+
+            # Local: large file (auto-partitions)
+            conn.upload(
+                name="txns",
+                path="/data/large_txns.parquet",
+            )
+
+            # Local: disable auto-partitioning (raises if > 1 GiB)
+            conn.upload(
+                name="users",
+                path="/data/users.parquet",
+                auto_partition=False,
+            )
+
+            # CSV connector
+            csv_conn = kumoai.FileUploadConnector(file_type="csv")
+            csv_conn.upload(name="sales", path="/data/sales.csv")
 
-            #
-
+            # Remote: single file (<= 1 GiB)
+            conn.upload(name="logs", path="s3://bkt/path/logs.parquet")
 
-            #
-
-                path="/data/large_transactions.parquet")
+            # Remote: directory of shards (uniform format)
+            csv_conn.upload(name="events", path="gs://mybkt/events_csv/")
 
-
-
-
+        Args:
+            name:
+                Table name to create in Kumo; access later via this connector.
+            path:
+                Local path or remote URL to a ``.parquet``/``.csv`` file or a
+                directory (uniform format). The format must match this
+                connector’s ``file_type``.
+            auto_partition:
+                Local-only. If ``True`` and the local file is > 1 GiB, split
+                into ~``partition_size_mb`` MiB parts.
+            partition_size_mb:
+                Local-only. Target partition size (100–1000 MiB) when
+                ``auto_partition`` is ``True``.
+        """
+        upload_table(name=name, path=path, auto_partition=auto_partition,
+                     partition_size_mb=partition_size_mb,
+                     file_type=self._file_type)
+
+    def delete(
+        self,
+        name: str,
+    ) -> None:
+        r"""Synchronously deletes a previously uploaded table from the Kumo
+        data plane.
 
-
-            connectorCSV = kumoai.FileUploadConnector(file_type="csv")
+        .. code-block:: python
 
-            #
-
+            # Assume we have uploaded a `.parquet` table named `users`, and a
+            # `FileUploadConnector` has been created called `connector`, and
+            # we want to delete this table from Kumo:
+            connector.delete(name="users")
 
         Args:
-            name: The name of the table to be
-
-            :
-            path: The full path of the table to be uploaded, on the local
-                machine. File Type must match the connector type.
-            auto_partition: Whether to automatically
-                partition large files (>1GB).
-                If False and file is >1GB, raises ValueError. Supports both
-                Parquet and CSV files.
-            partition_size_mb: The size of each partition in MB. Only used if
-                auto_partition is True.
+            name: The name of the table to be deleted. This table must have
+                previously been uploaded with a call to
+                :meth:`~kumoai.connector.FileUploadConnector.upload`.
         """
-
-
-
-
-
-        # Validate file type
-        if not (path.endswith(".parquet") or path.endswith(".csv")):
-            raise ValueError(f"Path {path} must be either a CSV or Parquet "
-                             f"file. Partitioned data is not currently "
-                             f"supported.")
-
-        file_size = os.path.getsize(path)
-
-        # Route based on file size
-        if file_size < MAX_PARTITION_SIZE:
-            return _upload_single_file(name, path)
-
-        if not auto_partition:
-            raise ValueError(f"File {path} is {file_size / (1024**3):.2f}GB, "
-                             f"which exceeds the 1GB limit. Enable "
-                             f"auto_partition=True to automatically partition "
-                             f"large files.")
-
-        # Partition and upload large files
-        partition_size = partition_size_mb * 1024**2
-        if (partition_size > MAX_PARTITION_SIZE
-                or partition_size < MIN_PARTITION_SIZE):
-            raise ValueError(f"Partition size {partition_size_mb}MB must be "
-                             f"between {MIN_PARTITION_SIZE / 1024**2}MB and "
-                             f"{MAX_PARTITION_SIZE / 1024**2}MB.")
-
-        logger.info(
-            "File %s is large with size %s, partitioning for upload...", path,
-            file_size)
-        if path.endswith('.parquet'):
-            _upload_partitioned_parquet(name, path, partition_size)
-        else:
-            _upload_partitioned_csv(name, path, partition_size)
+        if not self.has_table(name):
+            raise ValueError(f"The table '{name}' does not exist in {self}. "
+                             f"Please check the existence of the source data.")
+
+        delete_uploaded_table(name, self._file_type)
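The connector now exposes a symmetric lifecycle: `upload()` delegates to `upload_table()` in `kumoai.connector.utils`, and the new `delete()` checks `has_table()` before calling `delete_uploaded_table()`. A short round-trip sketch based on the docstring examples above:

.. code-block:: python

    import kumoai

    conn = kumoai.FileUploadConnector(file_type="parquet")
    conn.upload(name="users", path="/data/users.parquet")

    # ... use the uploaded table ...

    # Raises ValueError if "users" was never uploaded:
    conn.delete(name="users")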
kumoai/connector/snowflake_connector.py
CHANGED

@@ -17,6 +17,7 @@ from kumoai.connector import Connector
 _ENV_SNOWFLAKE_USER = 'SNOWFLAKE_USER'
 _ENV_SNOWFLAKE_PASSWORD = 'SNOWFLAKE_PASSWORD'
 _ENV_SNOWFLAKE_PRIVATE_KEY = 'SNOWFLAKE_PRIVATE_KEY'
+_ENV_SNOWFLAKE_PRIVATE_KEY_PASSPHRASE = 'SNOWFLAKE_PRIVATE_KEY_PASSPHRASE'
 
 
 class SnowflakeConnector(Connector):
@@ -100,6 +101,9 @@ class SnowflakeConnector(Connector):
                                    os.getenv(_ENV_SNOWFLAKE_PASSWORD))
         private_key = credentials.get("private_key",
                                       os.getenv(_ENV_SNOWFLAKE_PRIVATE_KEY))
+        private_key_passphrase = credentials.get(
+            "private_key_passphrase",
+            os.getenv(_ENV_SNOWFLAKE_PRIVATE_KEY_PASSPHRASE))
 
         if not password and not private_key:
             self._create_native_connector()
@@ -109,6 +113,9 @@ class SnowflakeConnector(Connector):
         # decide which credential class (KeyPair or UsernamePassword) to use
         if private_key:
             credentials_args["private_key"] = private_key
+            if private_key_passphrase:
+                credentials_args[
+                    "private_key_passphrase"] = private_key_passphrase
         else:
             credentials_args["password"] = password
         error_name = None
@@ -202,6 +209,8 @@ class SnowflakeConnector(Connector):
             credentials = KeyPair(
                 user=credentials["user"],
                 private_key=credentials["private_key"],
+                private_key_passphrase=credentials.get(
+                    "private_key_passphrase"),
             )
             args = CreateConnectorArgs(
                 config=SnowflakeConnectorResourceConfig(