exasol-python-extension-common 0.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- exasol_python_extension_common-0.2.0/LICENSE +21 -0
- exasol_python_extension_common-0.2.0/PKG-INFO +39 -0
- exasol_python_extension_common-0.2.0/README.md +16 -0
- exasol_python_extension_common-0.2.0/exasol/python_extension_common/__init__.py +0 -0
- exasol_python_extension_common-0.2.0/exasol/python_extension_common/deployment/language_container_deployer.py +349 -0
- exasol_python_extension_common-0.2.0/exasol/python_extension_common/deployment/language_container_deployer_cli.py +220 -0
- exasol_python_extension_common-0.2.0/exasol/python_extension_common/deployment/language_container_validator.py +140 -0
- exasol_python_extension_common-0.2.0/pyproject.toml +69 -0
- exasol_python_extension_common-0.2.0/setup.py +37 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 Exasol
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: exasol-python-extension-common
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: A collection of common utilities for Exasol extensions.
|
|
5
|
+
License: MIT
|
|
6
|
+
Author: Mikhail Beck
|
|
7
|
+
Author-email: mikhail.beck@exasol.com
|
|
8
|
+
Requires-Python: >=3.8.0,<4.0
|
|
9
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
15
|
+
Requires-Dist: click (>=8.1.7,<9.0.0)
|
|
16
|
+
Requires-Dist: exasol-bucketfs (>=0.10.0)
|
|
17
|
+
Requires-Dist: exasol-saas-api (>=0.7.0,<1.0.0)
|
|
18
|
+
Requires-Dist: pyexasol (>=0.25.0,<0.26.0)
|
|
19
|
+
Requires-Dist: requests (<2.32.0)
|
|
20
|
+
Requires-Dist: tenacity (>=8.3.0,<9.0.0)
|
|
21
|
+
Description-Content-Type: text/markdown
|
|
22
|
+
|
|
23
|
+
# Exasol Python Extension Common
|
|
24
|
+
|
|
25
|
+
A package with common functionality, shared by Exasol Python Extensions, e.g.
|
|
26
|
+
* [transformers-extension](https://github.com/exasol/transformers-extension)
|
|
27
|
+
* [sagemaker-extension](https://github.com/exasol/sagemaker-extension)
|
|
28
|
+
|
|
29
|
+
## Features
|
|
30
|
+
|
|
31
|
+
A deployer for script language containers (SLC) to be used by UDF-based extensions of the Exasol database that require a special SLC.
|
|
32
|
+
|
|
33
|
+
## More documentation
|
|
34
|
+
|
|
35
|
+
* User Guide
|
|
36
|
+
* [Developer Guide](doc/developer-guide.md)
|
|
37
|
+
* [User Defined Functions (UDF)](https://docs.exasol.com/db/latest/database_concepts/udf_scripts.htm)
|
|
38
|
+
* [Script Language Containers (SLC)](https://github.com/exasol/script-languages-release/)
|
|
39
|
+
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# Exasol Python Extension Common
|
|
2
|
+
|
|
3
|
+
A package with common functionality, shared by Exasol Python Extensions, e.g.
|
|
4
|
+
* [transformers-extension](https://github.com/exasol/transformers-extension)
|
|
5
|
+
* [sagemaker-extension](https://github.com/exasol/sagemaker-extension)
|
|
6
|
+
|
|
7
|
+
## Features
|
|
8
|
+
|
|
9
|
+
A deployer for script language containers (SLC) to be used by UDF-based extensions of the Exasol database that require a special SLC.
|
|
10
|
+
|
|
11
|
+
## More documentation
|
|
12
|
+
|
|
13
|
+
* User Guide
|
|
14
|
+
* [Developer Guide](doc/developer-guide.md)
|
|
15
|
+
* [User Defined Functions (UDF)](https://docs.exasol.com/db/latest/database_concepts/udf_scripts.htm)
|
|
16
|
+
* [Script Language Containers (SLC)](https://github.com/exasol/script-languages-release/)
|
|
File without changes
|
|
@@ -0,0 +1,349 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
from textwrap import dedent
|
|
3
|
+
from typing import List, Optional, Dict
|
|
4
|
+
from pathlib import Path, PurePosixPath
|
|
5
|
+
import logging
|
|
6
|
+
import tempfile
|
|
7
|
+
import ssl
|
|
8
|
+
import requests # type: ignore
|
|
9
|
+
import pyexasol # type: ignore
|
|
10
|
+
import exasol.bucketfs as bfs # type: ignore
|
|
11
|
+
from exasol.saas.client.api_access import (get_connection_params, get_database_id) # type: ignore
|
|
12
|
+
|
|
13
|
+
from exasol.python_extension_common.deployment.language_container_validator import (
|
|
14
|
+
wait_language_container, temp_schema
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def get_websocket_sslopt(use_ssl_cert_validation: bool = True,
                         ssl_trusted_ca: Optional[str] = None,
                         ssl_client_certificate: Optional[str] = None,
                         ssl_private_key: Optional[str] = None) -> dict:
    """
    Returns a dictionary in the websocket-client format
    (see https://websocket-client.readthedocs.io/en/latest/faq.html#what-else-can-i-do-with-sslopts)

    use_ssl_cert_validation - If False, the server certificate is not validated.
    ssl_trusted_ca - Path to a file or a directory with trusted CA certificates.
    ssl_client_certificate - Path to the client's own certificate file.
    ssl_private_key - Path to the private key file matching the client certificate.
        Only used when ssl_client_certificate is also provided.

    Raises ValueError if any of the provided paths does not exist.
    """

    # Is server certificate validation required?
    sslopt: Dict[str, object] = {"cert_reqs": ssl.CERT_REQUIRED if use_ssl_cert_validation else ssl.CERT_NONE}

    # Is a bundle with trusted CAs provided?
    if ssl_trusted_ca:
        trusted_ca_path = Path(ssl_trusted_ca)
        if trusted_ca_path.is_dir():
            # A directory containing CA certificates.
            sslopt["ca_cert_path"] = ssl_trusted_ca
        elif trusted_ca_path.is_file():
            # A single CA bundle file.
            sslopt["ca_certs"] = ssl_trusted_ca
        else:
            raise ValueError(f"Trusted CA location {ssl_trusted_ca} doesn't exist.")

    # Is client's own certificate provided?
    if ssl_client_certificate:
        if not Path(ssl_client_certificate).is_file():
            raise ValueError(f"Certificate file {ssl_client_certificate} doesn't exist.")
        sslopt["certfile"] = ssl_client_certificate
        # A private key only makes sense together with a client certificate.
        if ssl_private_key:
            if not Path(ssl_private_key).is_file():
                raise ValueError(f"Private key file {ssl_private_key} doesn't exist.")
            sslopt["keyfile"] = ssl_private_key

    return sslopt
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class LanguageActivationLevel(Enum):
    """
    Language activation level, i.e.
    ALTER <LanguageActivationLevel> SET SCRIPT_LANGUAGES=...

    Session affects only the current database session; System affects all sessions.
    The member value is the keyword used in the ALTER statement.
    """
    Session = 'SESSION'
    System = 'SYSTEM'
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def get_language_settings(pyexasol_conn: pyexasol.ExaConnection, alter_type: LanguageActivationLevel) -> str:
    """
    Fetches the current SCRIPT_LANGUAGES value at the specified activation level.

    pyexasol_conn - Opened database connection.
    alter_type - Activation level - SYSTEM or SESSION.
    """
    # The EXA_PARAMETERS system view exposes one column per activation level,
    # e.g. SYSTEM_VALUE or SESSION_VALUE.
    query = f"""SELECT "{alter_type.value}_VALUE" FROM SYS.EXA_PARAMETERS WHERE
        PARAMETER_NAME='SCRIPT_LANGUAGES'"""
    rows = pyexasol_conn.execute(query).fetchall()
    return rows[0][0]
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def get_udf_path(bucket_base_path: bfs.path.PathLike, bucket_file: str) -> PurePosixPath:
    """
    Returns the path of the specified file in a bucket, as it's seen from a UDF.

    bucket_base_path - Base directory in the bucket
    bucket_file - File path in the bucket, relative to the base directory.
    """
    # Join first, then translate the BucketFS path into the UDF view of it.
    return PurePosixPath((bucket_base_path / bucket_file).as_udf_path())
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
class LanguageContainerDeployer:
    """
    Deploys a script language container (SLC) to an Exasol database.

    Deployment consists of uploading the container archive to BucketFS and
    activating it by updating the SCRIPT_LANGUAGES parameter at the SESSION
    and/or SYSTEM level.
    """

    def __init__(self,
                 pyexasol_connection: pyexasol.ExaConnection,
                 language_alias: str,
                 bucketfs_path: bfs.path.PathLike) -> None:
        """
        pyexasol_connection - Opened database connection.
        language_alias - Language alias under which the container is activated.
        bucketfs_path - Base directory in the bucket where containers are uploaded.
        """
        self._bucketfs_path = bucketfs_path
        self._language_alias = language_alias
        self._pyexasol_conn = pyexasol_connection
        logger.debug("Init %s", LanguageContainerDeployer.__name__)

    def download_and_run(self, url: str,
                         bucket_file_path: str,
                         alter_system: bool = True,
                         allow_override: bool = False,
                         wait_for_completion: bool = True) -> None:
        """
        Downloads the language container from the provided url to a temporary file and then deploys it.
        See docstring on the `run` method for details on what is involved in the deployment.

        url - Address where the container will be downloaded from.
        bucket_file_path - Path within the designated bucket where the container should be uploaded.
        alter_system - If True will try to activate the container at the System level.
        allow_override - If True the activation of a language container with the same alias will be
            overridden, otherwise a RuntimeException will be thrown.
        wait_for_completion - If True will wait until the language container becomes operational.
        """

        with tempfile.NamedTemporaryFile() as tmp_file:
            # The whole archive is buffered in memory before writing; a timeout
            # guards against a stalled download.
            response = requests.get(url, stream=True, timeout=300)
            response.raise_for_status()
            tmp_file.write(response.content)

            self.run(Path(tmp_file.name), bucket_file_path, alter_system, allow_override,
                     wait_for_completion)

    def run(self, container_file: Optional[Path] = None,
            bucket_file_path: Optional[str] = None,
            alter_system: bool = True,
            allow_override: bool = False,
            wait_for_completion: bool = True) -> None:
        """
        Deploys the language container. This includes two steps, both of which are optional:
        - Uploading the container into the database. This step can be skipped if the container
          has already been uploaded.
        - Activating the container. In case the container does not get activated at the System
          level, two alternative activation SQL commands (one for the System and one for the Session
          levels) will be printed on the console.

        container_file - Path of the container tar.gz file in a local file system.
            If not provided the container is assumed to be uploaded already.
        bucket_file_path - Path within the designated bucket where the container should be uploaded.
            If not specified the name of the container file will be used instead.
        alter_system - If True will try to activate the container at the System level.
        allow_override - If True the activation of a language container with the same alias will be
            overridden, otherwise a RuntimeException will be thrown.
        wait_for_completion - If True will wait until the language container becomes operational.
        """

        if not bucket_file_path:
            if not container_file:
                raise ValueError('Either a container file or a bucket file path must be specified.')
            bucket_file_path = container_file.name

        if container_file:
            self.upload_container(container_file, bucket_file_path)

        # Activate the language container.
        if alter_system:
            self.activate_container(bucket_file_path, LanguageActivationLevel.System,
                                    allow_override)
        # Session-level activation is needed for the subsequent readiness check
        # to run in the current session.
        self.activate_container(bucket_file_path, LanguageActivationLevel.Session,
                                allow_override)

        # Maybe wait until the container becomes operational.
        if container_file and wait_for_completion:
            with temp_schema(self._pyexasol_conn) as schema:
                wait_language_container(self._pyexasol_conn, self._language_alias, schema)

        if not alter_system:
            message = dedent(f"""
                In SQL, you can activate the SLC
                by using the following statements:

                To activate the SLC only for the current session:
                {self.generate_activation_command(bucket_file_path, LanguageActivationLevel.Session, True)}

                To activate the SLC on the system:
                {self.generate_activation_command(bucket_file_path, LanguageActivationLevel.System, True)}
            """)
            print(message)

    def upload_container(self, container_file: Path,
                         bucket_file_path: Optional[str] = None) -> None:
        """
        Upload the language container to the BucketFS.

        container_file - Path of the container tar.gz file in a local file system.
        bucket_file_path - Path within the designated bucket where the container should be
            uploaded. Defaults to the name of the container file.
        """
        if not container_file.is_file():
            raise RuntimeError(f"Container file {container_file} "
                               f"is not a file.")
        # Fall back to the container file name, consistent with `run`. Previously a
        # None value would cause a TypeError when building the bucket path.
        if not bucket_file_path:
            bucket_file_path = container_file.name
        with open(container_file, "br") as f:
            file_path = self._bucketfs_path / bucket_file_path
            file_path.write(f)
        logger.debug("Container is uploaded to bucketfs")

    def activate_container(self, bucket_file_path: str,
                           alter_type: LanguageActivationLevel = LanguageActivationLevel.Session,
                           allow_override: bool = False) -> None:
        """
        Activates the language container at the required level.

        bucket_file_path - Path within the designated bucket where the container is uploaded.
        alter_type - Language activation level, defaults to the SESSION.
        allow_override - If True the activation of a language container with the same alias will be
            overridden, otherwise a RuntimeException will be thrown.
        """
        alter_command = self.generate_activation_command(bucket_file_path, alter_type, allow_override)
        self._pyexasol_conn.execute(alter_command)
        logger.debug(alter_command)

    def generate_activation_command(self, bucket_file_path: str,
                                    alter_type: LanguageActivationLevel,
                                    allow_override: bool = False) -> str:
        """
        Generates an SQL command to activate the SLC container at the required level. The command will
        preserve existing activations of other containers identified by different language aliases.
        Activation of a container with the same alias, if exists, will be overwritten.

        bucket_file_path - Path within the designated bucket where the container is uploaded.
        alter_type - Activation level - SYSTEM or SESSION.
        allow_override - If True the activation of a language container with the same alias will be
            overridden, otherwise a RuntimeException will be thrown.
        """
        path_in_udf = get_udf_path(self._bucketfs_path, bucket_file_path)
        new_settings = \
            self._update_previous_language_settings(alter_type, allow_override, path_in_udf)
        alter_command = \
            f"ALTER {alter_type.value} SET SCRIPT_LANGUAGES='{new_settings}';"
        return alter_command

    def _update_previous_language_settings(self, alter_type: LanguageActivationLevel,
                                           allow_override: bool,
                                           path_in_udf: PurePosixPath) -> str:
        """
        Merges the new container's language definition into the currently active
        SCRIPT_LANGUAGES settings at the given level.
        """
        prev_lang_settings = get_language_settings(self._pyexasol_conn, alter_type)
        # SCRIPT_LANGUAGES is a space-separated list of ALIAS=URL definitions.
        prev_lang_aliases = prev_lang_settings.split(" ")
        self._check_if_requested_language_alias_already_exists(
            allow_override, prev_lang_aliases)
        new_definitions_str = self._generate_new_language_settings(
            path_in_udf, prev_lang_aliases)
        return new_definitions_str

    def get_language_definition(self, bucket_file_path: str) -> str:
        """
        Generate a language definition (ALIAS=URL) for the specified bucket file path.

        bucket_file_path - Path within the designated bucket where the container is uploaded.
        """
        path_in_udf = get_udf_path(self._bucketfs_path, bucket_file_path)
        result = self._generate_new_language_settings(path_in_udf=path_in_udf, prev_lang_aliases=[])
        return result

    def _generate_new_language_settings(self, path_in_udf: PurePosixPath,
                                        prev_lang_aliases: List[str]) -> str:
        """
        Builds the SCRIPT_LANGUAGES value: all previous definitions except the one
        for this deployer's alias, plus a fresh definition for this container.
        """
        other_definitions = [
            alias_definition for alias_definition in prev_lang_aliases
            if not alias_definition.startswith(self._language_alias + "=")]
        # The URL part of the definition must not include the leading
        # /buckets/<bfs service> components of the UDF path.
        path_in_udf_without_buckets = PurePosixPath(*path_in_udf.parts[2:])
        new_language_alias_definition = \
            f"{self._language_alias}=localzmq+protobuf:///" \
            f"{path_in_udf_without_buckets}?lang=python#" \
            f"{path_in_udf}/exaudf/exaudfclient_py3"
        new_definitions = other_definitions + [new_language_alias_definition]
        new_definitions_str = " ".join(new_definitions)
        return new_definitions_str

    def _check_if_requested_language_alias_already_exists(
            self, allow_override: bool,
            prev_lang_aliases: List[str]) -> None:
        """
        Raises RuntimeError if the alias is already in use and overriding is not allowed;
        otherwise logs a warning.
        """
        definition_for_requested_alias = [
            alias_definition for alias_definition in prev_lang_aliases
            if alias_definition.startswith(self._language_alias + "=")]
        if not len(definition_for_requested_alias) == 0:
            warning_message = f"The requested language alias {self._language_alias} is already in use."
            if allow_override:
                logger.warning(warning_message)
            else:
                raise RuntimeError(warning_message)

    @classmethod
    def create(cls,
               language_alias: str, dsn: Optional[str] = None,
               db_user: Optional[str] = None, db_password: Optional[str] = None,
               bucketfs_host: Optional[str] = None, bucketfs_port: Optional[int] = None,
               bucketfs_name: Optional[str] = None, bucket: Optional[str] = None,
               bucketfs_user: Optional[str] = None, bucketfs_password: Optional[str] = None,
               bucketfs_use_https: bool = True,
               saas_url: Optional[str] = None,
               saas_account_id: Optional[str] = None, saas_database_id: Optional[str] = None,
               saas_database_name: Optional[str] = None, saas_token: Optional[str] = None,
               path_in_bucket: str = '',
               use_ssl_cert_validation: bool = True, ssl_trusted_ca: Optional[str] = None,
               ssl_client_certificate: Optional[str] = None,
               ssl_private_key: Optional[str] = None) -> "LanguageContainerDeployer":
        """
        Alternate constructor that opens the database and BucketFS connections from
        raw credentials. Supply either the full set of on-prem parameters
        [dsn, db_user, db_password, bucketfs_host, bucketfs_port, bucketfs_name,
        bucket, bucketfs_user, bucketfs_password] or the SaaS parameters
        [saas_url, saas_account_id, saas_token and either saas_database_id or
        saas_database_name]. Raises ValueError otherwise.
        """

        # Infer where the database is - on-prem or SaaS.
        if all((dsn, db_user, db_password, bucketfs_host, bucketfs_port,
                bucketfs_name, bucket, bucketfs_user, bucketfs_password)):
            connection_params = {'dsn': dsn, 'user': db_user, 'password': db_password}
            bfs_url = (f"{'https' if bucketfs_use_https else 'http'}://"
                       f"{bucketfs_host}:{bucketfs_port}")
            # A custom CA bundle, when provided, takes the place of the boolean flag.
            verify = ssl_trusted_ca or use_ssl_cert_validation
            bucketfs_path = bfs.path.build_path(backend=bfs.path.StorageBackend.onprem,
                                                url=bfs_url,
                                                username=bucketfs_user,
                                                password=bucketfs_password,
                                                service_name=bucketfs_name,
                                                bucket_name=bucket,
                                                verify=verify,
                                                path=path_in_bucket)

        elif all((saas_url, saas_account_id, saas_token,
                  any((saas_database_id, saas_database_name)))):
            connection_params = get_connection_params(host=saas_url,
                                                      account_id=saas_account_id,
                                                      database_id=saas_database_id,
                                                      database_name=saas_database_name,
                                                      pat=saas_token)
            # The BucketFS SaaS backend requires the database id; resolve it from
            # the database name if only the latter was provided.
            saas_database_id = (saas_database_id or
                                get_database_id(
                                    host=saas_url,
                                    account_id=saas_account_id,
                                    pat=saas_token,
                                    database_name=saas_database_name
                                ))
            bucketfs_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas,
                                                url=saas_url,
                                                account_id=saas_account_id,
                                                database_id=saas_database_id,
                                                pat=saas_token,
                                                path=path_in_bucket)
        else:
            # Fixed typo: the parameter is 'dsn', not 'dns'.
            raise ValueError('Incomplete parameter list. '
                             'Please either provide the parameters [dsn, db_user, '
                             'db_password, bucketfs_host, bucketfs_port, bucketfs_name, '
                             'bucket, bucketfs_user, bucketfs_password] for an On-Prem '
                             'database or [saas_url, saas_account_id, saas_database_id, '
                             'saas_token] for a SaaS database.')

        websocket_sslopt = get_websocket_sslopt(use_ssl_cert_validation, ssl_trusted_ca,
                                                ssl_client_certificate, ssl_private_key)

        pyexasol_conn = pyexasol.connect(**connection_params,
                                         encryption=True,
                                         websocket_sslopt=websocket_sslopt)

        return cls(pyexasol_conn, language_alias, bucketfs_path)
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
from typing import Optional, Any
|
|
2
|
+
import os
|
|
3
|
+
import re
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
import click
|
|
7
|
+
from exasol.python_extension_common.deployment.language_container_deployer import LanguageContainerDeployer
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class CustomizableParameters(Enum):
    """
    Parameters of the cli that can be programmatically customised by a developer
    of a specialised version of the cli.
    The names in the enum list should match the parameter names in language_container_deployer_main.
    """
    # The numeric values are arbitrary unique ids; only the member names are used
    # (see _ParameterFormatters.set_formatter, which keys on .name).
    container_url = 1
    container_name = 2
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class _ParameterFormatters:
    """
    Class facilitating customization of the cli.

    The idea is that some of the cli parameters can be programmatically customized based
    on values of other parameters and externally supplied formatters. For example a specialized
    version of the cli may want to provide its own url. Furthermore, this url will depend on
    the user supplied parameter called "version". The solution is to set a formatter for the
    url, for instance "http://my_stuff/{version}/my_data". If the user specifies non-empty version
    parameter the url will be fully formed.

    A formatter may include more than one parameter. In the previous example the url could,
    for instance, also include a username: "http://my_stuff/{version}/{user}/my_data".

    Note that customized parameters can only be updated in a callback function. There is no
    way to inject them directly into the cli. Also, the current implementation doesn't perform
    the update if the value of the parameter dressed with the callback is None.

    IMPORTANT! Please make sure that the formatters are set up before the call to the cli function,
    e.g. language_container_deployer_main, is executed.
    """
    def __init__(self):
        # Maps a customizable parameter name -> its formatter string.
        self._formatters = {}

    def __call__(self, ctx: click.Context, param: click.Parameter, value: Optional[Any]) -> Optional[Any]:
        # click option callback: substitutes this parameter's value into every
        # registered formatter, storing the (partially) formatted result in ctx.params.

        def update_parameter(parameter_name: str, formatter: str) -> None:
            # Start from the current (possibly already partially formatted) value,
            # falling back to the pristine formatter string.
            param_formatter = ctx.params.get(parameter_name, formatter)
            if param_formatter:
                # Enclose in double curly brackets all other parameters in the formatting string,
                # to avoid the missing parameters' error. Below is an example of a formatter string
                # before and after applying the regex, assuming the current parameter is 'version'.
                # 'something-with-{version}/tailored-for-{user}' => 'something-with-{version}/tailored-for-{{user}}'
                # We were looking for all occurrences of a pattern '{some_name}', where some_name is not version.
                pattern = r'\{(?!' + (param.name or '') + r'\})\w+\}'
                param_formatter = re.sub(pattern, lambda m: f'{{{m.group(0)}}}', param_formatter)
                kwargs = {param.name: value}
                ctx.params[parameter_name] = param_formatter.format(**kwargs)

        # No update is performed when the triggering parameter's value is None.
        if value is not None:
            for prm_name, prm_formatter in self._formatters.items():
                update_parameter(prm_name, prm_formatter)

        return value

    def set_formatter(self, custom_parameter: CustomizableParameters, formatter: str) -> None:
        """ Sets a formatter for a customizable parameter. """
        self._formatters[custom_parameter.name] = formatter

    def clear_formatters(self):
        """ Deletes all formatters, mainly for testing purposes. """
        self._formatters.clear()
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
# Global cli customization object.
# Specialized versions of this cli should use this object to set custom parameter formatters
# (via set_formatter) BEFORE language_container_deployer_main is invoked.
slc_parameter_formatters = _ParameterFormatters()
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
# This text will be displayed instead of the actual value, if found in an environment
# variable, in a prompt. It also serves as the sentinel default that secret_callback
# uses to detect that the user did not type the value on the command line.
SECRET_DISPLAY = '***'
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class SecretParams(Enum):
    """
    This enum serves as a definition of confidential parameters which values should not be
    displayed in the console, unless the user types them in the command line.

    The enum name is also the name of the environment variable where the correspondent
    secret value can be stored.

    The enum value is also the name of the cli parameter.
    """
    # e.g. the '--db-pass' cli option may be supplied via the DB_PASSWORD env variable.
    DB_PASSWORD = 'db-pass'
    BUCKETFS_PASSWORD = 'bucketfs-password'
    SAAS_ACCOUNT_ID = 'saas-account-id'
    SAAS_DATABASE_ID = 'saas-database-id'
    SAAS_TOKEN = 'saas-token'
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def secret_callback(ctx: click.Context, param: click.Option, value: Any):
    """
    Here we try to get the secret parameter value from an environment variable.
    The reason for doing this in the callback instead of using a callable default is
    that we don't want the default to be displayed in the prompt. There seems to
    be no way of altering this behaviour.
    """
    # A value other than the sentinel was typed by the user - keep it as is.
    if value != SECRET_DISPLAY:
        return value
    # Strip the leading '--' from the option name to look up the matching enum
    # member; its name is the environment variable holding the secret.
    env_var_name = SecretParams(param.opts[0][2:]).name
    return os.environ.get(env_var_name)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
@click.command(name="language-container")
|
|
115
|
+
@click.option('--bucketfs-name', type=str)
|
|
116
|
+
@click.option('--bucketfs-host', type=str)
|
|
117
|
+
@click.option('--bucketfs-port', type=int)
|
|
118
|
+
@click.option('--bucketfs-use-https', type=bool, default=False)
|
|
119
|
+
@click.option('--bucketfs-user', type=str)
|
|
120
|
+
@click.option(f'--{SecretParams.BUCKETFS_PASSWORD.value}', type=str,
|
|
121
|
+
prompt='BucketFS password', prompt_required=False,
|
|
122
|
+
hide_input=True, default=SECRET_DISPLAY, callback=secret_callback)
|
|
123
|
+
@click.option('--bucket', type=str)
|
|
124
|
+
@click.option('--saas-url', type=str,
|
|
125
|
+
default='https://cloud.exasol.com')
|
|
126
|
+
@click.option(f'--{SecretParams.SAAS_ACCOUNT_ID.value}', type=str,
|
|
127
|
+
prompt='SaaS account id', prompt_required=False,
|
|
128
|
+
hide_input=True, default=SECRET_DISPLAY, callback=secret_callback)
|
|
129
|
+
@click.option(f'--{SecretParams.SAAS_DATABASE_ID.value}', type=str,
|
|
130
|
+
prompt='SaaS database id', prompt_required=False,
|
|
131
|
+
hide_input=True, default=SECRET_DISPLAY, callback=secret_callback)
|
|
132
|
+
@click.option('--saas-database-name', type=str)
|
|
133
|
+
@click.option(f'--{SecretParams.SAAS_TOKEN.value}', type=str,
|
|
134
|
+
prompt='SaaS token', prompt_required=False,
|
|
135
|
+
hide_input=True, default=SECRET_DISPLAY, callback=secret_callback)
|
|
136
|
+
@click.option('--path-in-bucket', type=str)
|
|
137
|
+
@click.option('--container-file',
|
|
138
|
+
type=click.Path(exists=True, file_okay=True))
|
|
139
|
+
@click.option('--version', type=str, expose_value=False,
|
|
140
|
+
callback=slc_parameter_formatters)
|
|
141
|
+
@click.option('--dsn', type=str)
|
|
142
|
+
@click.option('--db-user', type=str)
|
|
143
|
+
@click.option(f'--{SecretParams.DB_PASSWORD.value}', type=str,
|
|
144
|
+
prompt='DB password', prompt_required=False,
|
|
145
|
+
hide_input=True, default=SECRET_DISPLAY, callback=secret_callback)
|
|
146
|
+
@click.option('--language-alias', type=str, default="PYTHON3_EXT")
|
|
147
|
+
@click.option('--ssl-cert-path', type=str, default="")
|
|
148
|
+
@click.option('--ssl-client-cert-path', type=str, default="")
|
|
149
|
+
@click.option('--ssl-client-private-key', type=str, default="")
|
|
150
|
+
@click.option('--use-ssl-cert-validation/--no-use-ssl-cert-validation', type=bool, default=True)
|
|
151
|
+
@click.option('--upload-container/--no-upload_container', type=bool, default=True)
|
|
152
|
+
@click.option('--alter-system/--no-alter-system', type=bool, default=True)
|
|
153
|
+
@click.option('--allow-override/--disallow-override', type=bool, default=False)
|
|
154
|
+
@click.option('--wait_for_completion/--no-wait_for_completion', type=bool, default=True)
|
|
155
|
+
def language_container_deployer_main(
|
|
156
|
+
bucketfs_name: str,
|
|
157
|
+
bucketfs_host: str,
|
|
158
|
+
bucketfs_port: int,
|
|
159
|
+
bucketfs_use_https: bool,
|
|
160
|
+
bucketfs_user: str,
|
|
161
|
+
bucketfs_password: str,
|
|
162
|
+
bucket: str,
|
|
163
|
+
saas_url: str,
|
|
164
|
+
saas_account_id: str,
|
|
165
|
+
saas_database_id: str,
|
|
166
|
+
saas_database_name: str,
|
|
167
|
+
saas_token: str,
|
|
168
|
+
path_in_bucket: str,
|
|
169
|
+
container_file: str,
|
|
170
|
+
dsn: str,
|
|
171
|
+
db_user: str,
|
|
172
|
+
db_pass: str,
|
|
173
|
+
language_alias: str,
|
|
174
|
+
ssl_cert_path: str,
|
|
175
|
+
ssl_client_cert_path: str,
|
|
176
|
+
ssl_client_private_key: str,
|
|
177
|
+
use_ssl_cert_validation: bool,
|
|
178
|
+
upload_container: bool,
|
|
179
|
+
alter_system: bool,
|
|
180
|
+
allow_override: bool,
|
|
181
|
+
wait_for_completion: bool,
|
|
182
|
+
container_url: Optional[str] = None,
|
|
183
|
+
container_name: Optional[str] = None):
|
|
184
|
+
|
|
185
|
+
deployer = LanguageContainerDeployer.create(
|
|
186
|
+
bucketfs_name=bucketfs_name,
|
|
187
|
+
bucketfs_host=bucketfs_host,
|
|
188
|
+
bucketfs_port=bucketfs_port,
|
|
189
|
+
bucketfs_use_https=bucketfs_use_https,
|
|
190
|
+
bucketfs_user=bucketfs_user,
|
|
191
|
+
bucketfs_password=bucketfs_password,
|
|
192
|
+
bucket=bucket,
|
|
193
|
+
saas_url=saas_url,
|
|
194
|
+
saas_account_id=saas_account_id,
|
|
195
|
+
saas_database_id=saas_database_id,
|
|
196
|
+
saas_database_name=saas_database_name,
|
|
197
|
+
saas_token=saas_token,
|
|
198
|
+
path_in_bucket=path_in_bucket,
|
|
199
|
+
dsn=dsn,
|
|
200
|
+
db_user=db_user,
|
|
201
|
+
db_password=db_pass,
|
|
202
|
+
language_alias=language_alias,
|
|
203
|
+
ssl_trusted_ca=ssl_cert_path,
|
|
204
|
+
ssl_client_certificate=ssl_client_cert_path,
|
|
205
|
+
ssl_private_key=ssl_client_private_key,
|
|
206
|
+
use_ssl_cert_validation=use_ssl_cert_validation)
|
|
207
|
+
|
|
208
|
+
if not upload_container:
|
|
209
|
+
deployer.run(alter_system=alter_system, allow_override=allow_override,
|
|
210
|
+
wait_for_completion=wait_for_completion)
|
|
211
|
+
elif container_file:
|
|
212
|
+
deployer.run(container_file=Path(container_file), alter_system=alter_system,
|
|
213
|
+
allow_override=allow_override, wait_for_completion=wait_for_completion)
|
|
214
|
+
elif container_url and container_name:
|
|
215
|
+
deployer.download_and_run(container_url, container_name, alter_system=alter_system,
|
|
216
|
+
allow_override=allow_override, wait_for_completion=wait_for_completion)
|
|
217
|
+
else:
|
|
218
|
+
# The error message should mention the parameters which the callback is specified for being missed.
|
|
219
|
+
raise ValueError("To upload a language container you should specify either its "
|
|
220
|
+
"release version or a path of the already downloaded container file.")
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
from typing import Generator
|
|
3
|
+
from datetime import timedelta
|
|
4
|
+
import random
|
|
5
|
+
import string
|
|
6
|
+
from contextlib import contextmanager
|
|
7
|
+
from textwrap import dedent
|
|
8
|
+
|
|
9
|
+
from tenacity import retry
|
|
10
|
+
from tenacity.wait import wait_fixed
|
|
11
|
+
from tenacity.stop import stop_after_delay, stop_after_attempt
|
|
12
|
+
|
|
13
|
+
import pyexasol # type: ignore
|
|
14
|
+
|
|
15
|
+
_DUMMY_UDF_NAME = 'DUMMY_UDF'
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def _get_test_udf_name(schema: str | None) -> str:
|
|
19
|
+
|
|
20
|
+
if schema:
|
|
21
|
+
return f'"{schema}"."{_DUMMY_UDF_NAME}"'
|
|
22
|
+
return f'"{_DUMMY_UDF_NAME}"'
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _create_dummy_udf(conn: pyexasol.ExaConnection, language_alias: str,
                      schema: str | None) -> None:
    """Creates a trivial scalar UDF, bound to the given language alias,
    that simply returns 0. Used to probe the language container.
    """
    create_sql = dedent(f"""
        CREATE OR REPLACE {language_alias} SCALAR SCRIPT {_get_test_udf_name(schema)}()
        RETURNS DECIMAL(1, 0) AS

        def run(ctx):
            return 0
        /
        """)
    conn.execute(create_sql)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _call_dummy_udf(conn: pyexasol.ExaConnection, schema: str | None) -> None:
    """Runs the test UDF once per cluster node and verifies the result.

    Raises RuntimeError if the query does not return the expected single
    row [(0,)].
    """
    udf_name = _get_test_udf_name(schema)
    sql = dedent(f"""
        SELECT {udf_name}()
        GROUP BY IPROC();
        """)
    result = conn.execute(sql).fetchall()
    # A bare `assert` would be stripped when Python runs with -O, silently
    # turning this validation into a no-op; raise explicitly instead.
    # NOTE(review): the expected result [(0,)] assumes a single row, i.e. a
    # single-node cluster or a single IPROC group — confirm for multi-node.
    if result != [(0,)]:
        raise RuntimeError(
            f'Test UDF {udf_name} returned an unexpected result: {result}')
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def _delete_dummy_udf(conn: pyexasol.ExaConnection, schema: str | None) -> None:
    """Drops the test UDF if it exists; a no-op otherwise."""
    drop_sql = dedent(f"""
        DROP SCRIPT IF EXISTS {_get_test_udf_name(schema)};
        """)
    conn.execute(drop_sql)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@retry(reraise=True, stop=stop_after_attempt(3))
def _create_random_schema(conn: pyexasol.ExaConnection, schema_name_length: int) -> str:
    """Creates a schema with a random ASCII-letter name of the given length
    and returns that name.

    Retried up to three times to cope with the unlikely event of a name
    collision with an existing schema; the last error is re-raised.
    """
    random_name = ''.join(random.choice(string.ascii_letters)
                          for _ in range(schema_name_length))
    conn.execute(query=f'CREATE SCHEMA "{random_name}";')
    return random_name
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def _delete_schema(conn: pyexasol.ExaConnection, schema: str) -> None:
|
|
71
|
+
|
|
72
|
+
sql = f'DROP SCHEMA IF EXISTS "{schema}" CASCADE;'
|
|
73
|
+
conn.execute(query=sql)
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def validate_language_container(conn: pyexasol.ExaConnection,
                                language_alias: str,
                                schema: str | None = None
                                ) -> None:
    """
    Runs a test to check if a language container has been installed and is now
    operational. Will raise an exception if this is not the case.

    The check creates a throwaway UDF bound to the given language alias,
    calls it once, and drops it again regardless of the outcome.

    conn - pyexasol connection. The language container must be activated either
        at the SYSTEM level or at the SESSION associated with this connection.
    language_alias - Language alias of the language container.
    schema - The schema to run the tests in. If not specified the current schema
        is assumed.
    """
    try:
        _create_dummy_udf(conn, language_alias, schema)
        _call_dummy_udf(conn, schema)
    finally:
        # Always clean up the test UDF, even when creation or the call failed.
        _delete_dummy_udf(conn, schema)
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def wait_language_container(conn: pyexasol.ExaConnection,
                            language_alias: str,
                            schema: str | None = None,
                            timeout: timedelta = timedelta(minutes=5),
                            interval: timedelta = timedelta(seconds=5),
                            ) -> None:
    """
    Keeps calling validate_language_container until it succeeds or the timeout expires.

    conn - pyexasol connection. The language container must be activated either
        at the SYSTEM level or at the SESSION associated with this connection.
    language_alias - Language alias of the language container.
    schema - The schema to run the tests in. If not specified the current schema
        is assumed.
    timeout - Will give up after this timeout expires. The last exception thrown
        by the validate_language_container will be re-raised.
    interval - The calls to validate_language_container are spaced by this time
        interval.
    """
    # tenacity accepts timedelta arguments for both wait_fixed and
    # stop_after_delay; reraise=True surfaces the last validation error
    # instead of tenacity's own RetryError.
    @retry(reraise=True, wait=wait_fixed(interval), stop=stop_after_delay(timeout))
    def repeat_validate_language_container():
        validate_language_container(conn, language_alias, schema)

    repeat_validate_language_container()
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
@contextmanager
def temp_schema(conn: pyexasol.ExaConnection,
                schema_name_length: int = 20
                ) -> Generator[str, None, None]:
    """
    A context manager for running an operation in a newly created temporary schema.
    The schema will be deleted after the operation is completed. Note, that all objects
    created in this schema will be deleted with it. Returns the name of the created schema.

    conn - pyexasol connection.
    schema_name_length - Number of characters in the temporary schema name.
    """
    schema = ''
    try:
        schema = _create_random_schema(conn, schema_name_length)
        yield schema
    finally:
        # Only clean up if the schema was actually created; otherwise a
        # failed creation would trigger a bogus DROP for an empty name.
        if schema:
            _delete_schema(conn, schema)
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
[tool.poetry]
|
|
2
|
+
name = "exasol-python-extension-common"
|
|
3
|
+
version = "0.2.0"
|
|
4
|
+
description = "A collection of common utilities for Exasol extensions."
|
|
5
|
+
packages = [ {include = "exasol"}, ]
|
|
6
|
+
authors = ["Mikhail Beck <mikhail.beck@exasol.com>"]
|
|
7
|
+
license = "MIT"
|
|
8
|
+
readme = "README.md"
|
|
9
|
+
|
|
10
|
+
[tool.poetry.dependencies]
|
|
11
|
+
python = ">=3.8.0,<4.0"
|
|
12
|
+
pyexasol = "^0.25.0"
|
|
13
|
+
exasol-bucketfs = ">=0.10.0"
|
|
14
|
+
click = "^8.1.7"
|
|
15
|
+
exasol-saas-api = ">=0.7.0,<1.0.0"
|
|
16
|
+
requests = "<2.32.0"
|
|
17
|
+
tenacity = "^8.3.0"
|
|
18
|
+
|
|
19
|
+
[tool.poetry.group.dev.dependencies]
|
|
20
|
+
pytest = "^7.2.0"
|
|
21
|
+
exasol-toolbox = ">=0.12.0"
|
|
22
|
+
exasol-script-languages-container-tool = "^0.18.2"
|
|
23
|
+
pytest-exasol-saas = ">=0.2.1,<1.0.0"
|
|
24
|
+
|
|
25
|
+
[build-system]
|
|
26
|
+
requires = ["poetry-core>=1.0.0"]
|
|
27
|
+
build-backend = "poetry.core.masonry.api"
|
|
28
|
+
|
|
29
|
+
[tool.coverage.run]
|
|
30
|
+
relative_files = true
|
|
31
|
+
source = [
|
|
32
|
+
"exasol",
|
|
33
|
+
]
|
|
34
|
+
|
|
35
|
+
[tool.coverage.report]
|
|
36
|
+
fail_under = 15
|
|
37
|
+
|
|
38
|
+
[tool.black]
|
|
39
|
+
line-length = 100
|
|
40
|
+
verbose = false
|
|
41
|
+
include = "\\.pyi?$"
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
[tool.isort]
|
|
45
|
+
profile = "black"
|
|
46
|
+
force_grid_wrap = 2
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
[tool.pylint.master]
|
|
50
|
+
errors-only = true
|
|
51
|
+
output-format = "colorized,json:.lint.json,text:.lint.txt"
|
|
52
|
+
|
|
53
|
+
[tool.pylint.format]
|
|
54
|
+
max-line-length = 100
|
|
55
|
+
max-module-lines = 800
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
[[tool.mypy.overrides]]
|
|
59
|
+
module = [
|
|
60
|
+
"exasol.toolbox.nox.tasks",
|
|
61
|
+
"test.*",
|
|
62
|
+
]
|
|
63
|
+
ignore_errors = true
|
|
64
|
+
ignore_missing_imports = true
|
|
65
|
+
|
|
66
|
+
[tool.pytest.ini_options]
|
|
67
|
+
markers = [
|
|
68
|
+
"saas: integration test that creates a db in SaaS.",
|
|
69
|
+
]
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
# NOTE: this file appears to be auto-generated by poetry from pyproject.toml
# (packages, dependencies and metadata mirror the [tool.poetry] sections);
# edit pyproject.toml rather than this file.
from setuptools import setup

# Python packages shipped in the distribution.
packages = \
['exasol',
 'exasol.python_extension_common',
 'exasol.python_extension_common.deployment']

# Include all non-Python files found in every package directory.
package_data = \
{'': ['*']}

# Runtime dependencies; version pins mirror pyproject.toml.
install_requires = \
['click>=8.1.7,<9.0.0',
 'exasol-bucketfs>=0.10.0',
 'exasol-saas-api>=0.7.0,<1.0.0',
 'pyexasol>=0.25.0,<0.26.0',
 'requests<2.32.0',
 'tenacity>=8.3.0,<9.0.0']

# Distribution metadata passed verbatim to setuptools.setup().
setup_kwargs = {
    'name': 'exasol-python-extension-common',
    'version': '0.2.0',
    'description': 'A collection of common utilities for Exasol extensions.',
    'long_description': '# Exasol Python Extension Common\n\nA package with common functionality, shared by Exasol Python Extensions, e.g.\n* [transformers-extension](https://github.com/exasol/transformers-extension)\n* [sagemaker-extension](https://github.com/exasol/sagemaker-extension)\n\n## Features\n\nA deployer for script language containers (SLC) to be used by UDF-based extensions of Exasol database requiring a special SLC.\n\n## More documentation\n\n* User Guide\n* [Developer Guide](doc/developer-guide.md)\n* [User Defined Functions (UDF)](https://docs.exasol.com/db/latest/database_concepts/udf_scripts.htm)\n* [Script Language Containers (SLC)](https://github.com/exasol/script-languages-release/)\n',
    'author': 'Mikhail Beck',
    'author_email': 'mikhail.beck@exasol.com',
    'maintainer': 'None',
    'maintainer_email': 'None',
    'url': 'None',
    'packages': packages,
    'package_data': package_data,
    'install_requires': install_requires,
    'python_requires': '>=3.8.0,<4.0',
}


setup(**setup_kwargs)
|