cdata-connect 0.0.1.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cdata_connect/LICENSE +21 -0
- cdata_connect/__init__.py +82 -0
- cdata_connect/connection.py +118 -0
- cdata_connect/cursor.py +366 -0
- cdata_connect/exceptions.py +42 -0
- cdata_connect/log.py +7 -0
- cdata_connect/util/__init__.py +0 -0
- cdata_connect/util/types.py +214 -0
- cdata_connect/version.py +5 -0
- cdata_connect-0.0.1.dev1.dist-info/METADATA +290 -0
- cdata_connect-0.0.1.dev1.dist-info/RECORD +14 -0
- cdata_connect-0.0.1.dev1.dist-info/WHEEL +5 -0
- cdata_connect-0.0.1.dev1.dist-info/licenses/LICENSE +21 -0
- cdata_connect-0.0.1.dev1.dist-info/top_level.txt +1 -0
cdata_connect/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 CData Software, Inc.
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
from typing import Optional
|
|
2
|
+
|
|
3
|
+
from .connection import Connection
|
|
4
|
+
from .exceptions import (
|
|
5
|
+
ConfigurationError,
|
|
6
|
+
DatabaseError,
|
|
7
|
+
DataError,
|
|
8
|
+
Error,
|
|
9
|
+
IntegrityError,
|
|
10
|
+
InterfaceError,
|
|
11
|
+
InternalError,
|
|
12
|
+
NotSupportedError,
|
|
13
|
+
OperationalError,
|
|
14
|
+
ProgrammingError,
|
|
15
|
+
Warning,
|
|
16
|
+
)
|
|
17
|
+
from .util.types import (
|
|
18
|
+
DBAPI_TYPE_STRING,
|
|
19
|
+
DBAPI_TYPE_BINARY,
|
|
20
|
+
DBAPI_TYPE_NUMBER,
|
|
21
|
+
DBAPI_TYPE_TIMESTAMP,
|
|
22
|
+
Date,
|
|
23
|
+
Time,
|
|
24
|
+
Timestamp,
|
|
25
|
+
Binary,
|
|
26
|
+
DateFromTicks,
|
|
27
|
+
TimeFromTicks,
|
|
28
|
+
TimestampFromTicks,
|
|
29
|
+
)
|
|
30
|
+
from .version import __version__
|
|
31
|
+
|
|
32
|
+
# PEP 249 requires these type objects at module level so callers can
# compare type codes from cursor.description against them.
STRING = DBAPI_TYPE_STRING        # 0 — character data
BINARY = DBAPI_TYPE_BINARY        # 1 — raw bytes
NUMBER = DBAPI_TYPE_NUMBER        # 2 — numeric data
DATETIME = DBAPI_TYPE_TIMESTAMP   # 3 — date/time data
# No dedicated ROWID type; STRING is the standard fallback
ROWID = DBAPI_TYPE_STRING

# PEP 249 module-level attributes describing this driver.
apilevel = "2.0"         # DB API level implemented
threadsafety = 1         # threads may share the module, but not connections
paramstyle = "pyformat"  # %(name)s-style parameter markers
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def connect(
    config_path: Optional[str] = None,
    base_url: Optional[str] = None,
    username: Optional[str] = None,
    password: Optional[str] = None,
    workspace: Optional[str] = None,
    timeout: int = 30,
    max_retries: int = 3,
    retry_delay: float = 1.0,
) -> Connection:
    """
    Create a connection to the CData Connect API.

    Credentials come either from a pyhocon config file (``config_path``)
    or from the explicit ``base_url``/``username``/``password`` arguments.

    :param config_path: Optional path to a pyhocon config file.
    :param base_url: The base URL of the CData Connect API.
    :param username: The username for authentication.
    :param password: The password for authentication.
    :param workspace: Optional workspace name to append as a query parameter.
    :param timeout: HTTP request timeout in seconds. Default 30.
    :param max_retries: Retry count for transient 5xx errors. Default 3.
    :param retry_delay: Base seconds between retries (exponential backoff). Default 1.0.
    :return: A Connection object.
    :raises InterfaceError: if neither a config file nor a complete
        credential set was supplied.
    """
    # An empty string for any credential counts as "not provided".
    have_explicit_credentials = bool(base_url and username and password)
    if not config_path and not have_explicit_credentials:
        raise InterfaceError(
            "Either config_path or base_url, username, and password must be provided."
        )

    return Connection(
        config_path=config_path,
        base_url=base_url,
        username=username,
        password=password,
        workspace=workspace,
        timeout=timeout,
        max_retries=max_retries,
        retry_delay=retry_delay,
    )
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
from pyhocon import ConfigFactory
|
|
2
|
+
from pyhocon.exceptions import ConfigException
|
|
3
|
+
from warnings import warn
|
|
4
|
+
|
|
5
|
+
from .cursor import Cursor
|
|
6
|
+
from .log import logger
|
|
7
|
+
from .exceptions import (
|
|
8
|
+
ConfigurationError, InterfaceError, Warning, Error,
|
|
9
|
+
DatabaseError, OperationalError, IntegrityError,
|
|
10
|
+
InternalError, ProgrammingError, NotSupportedError,
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Connection:
    """
    A DB API 2.0 connection to the CData Connect API.

    Credentials come either from a pyhocon config file (``config_path``)
    or from the explicit ``base_url``/``username``/``password`` arguments.

    :param base_url: The base URL of the CData Connect API.
    :param username: The username for authentication.
    :param password: The password for authentication.
    :param config_path: Optional path to a config file in pyhocon format.
    :param workspace: Optional arg to access a specific CData workspace.
    :param timeout: HTTP request timeout in seconds (connect + read). Default 30.
    :param max_retries: Number of retries on transient 5xx / connection errors. Default 3.
    :param retry_delay: Base delay in seconds between retries (exponential backoff). Default 1.0.
    """

    # DBAPI Extension: expose the module exception classes as attributes of
    # the connection (PEP 249 optional extension). Each access warns via
    # _getError, as the spec recommends for this extension.
    Warning = property(lambda self: self._getError(Warning))
    Error = property(lambda self: self._getError(Error))
    InterfaceError = property(lambda self: self._getError(InterfaceError))
    DatabaseError = property(lambda self: self._getError(DatabaseError))
    OperationalError = property(lambda self: self._getError(OperationalError))
    IntegrityError = property(lambda self: self._getError(IntegrityError))
    InternalError = property(lambda self: self._getError(InternalError))
    ProgrammingError = property(lambda self: self._getError(ProgrammingError))
    NotSupportedError = property(lambda self: self._getError(NotSupportedError))

    def _getError(self: "Connection", error):
        # Emit the PEP 249-recommended warning on every attribute access.
        warn("DB-API extension connection.%s used" % error.__name__, stacklevel=3)
        return error

    def __init__(self, base_url: str = None, username: str = None,
                 password: str = None, config_path: str = None,
                 workspace: str = None, timeout: int = 30,
                 max_retries: int = 3, retry_delay: float = 1.0):
        self.is_open = True
        self.timeout = timeout
        self.max_retries = max_retries
        self.retry_delay = retry_delay
        self.workspace = workspace

        if config_path:
            self._init_from_config(config_path)
        else:
            self._init_from_credentials(base_url, username, password)

    def _init_from_config(self, config_path):
        """Populate base_url and auth from a pyhocon config file."""
        logger.debug("Connection - Initialising Connection with config")
        try:
            config = ConfigFactory.parse_file(config_path)
        except (ConfigException, FileNotFoundError) as e:
            raise ConfigurationError(
                f"Configuration file could not be loaded: {str(e)}"
            ) from e

        try:
            config_base_url = config.get_string('cdata_api_db.base_url')
            self.base_url = config_base_url.rstrip('/')
            self.auth = (config.get_string('cdata_api_db.username'),
                         config.get_string('cdata_api_db.password'))
        except (ConfigException, KeyError) as e:
            raise ConfigurationError(
                f"Missing required configuration in cdata_api_db block: {str(e)}"
            ) from e

    def _init_from_credentials(self, base_url, username, password):
        """Populate base_url and auth from explicitly supplied arguments."""
        logger.debug("Connection - Initialising Connection with "
                     "username and password")

        if base_url is None or username is None or password is None:
            raise ConfigurationError(
                "Missing required parameters: base_url, username, "
                "and password must be provided."
            )

        self.base_url = base_url.rstrip('/')
        self.auth = (username, password)

    # ------------------------------------------------------------------
    # Context manager (PEP 249 optional extension)
    # ------------------------------------------------------------------

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always close; never suppress the in-flight exception.
        self.close()
        return False

    # ------------------------------------------------------------------
    # DB API 2.0 methods
    # ------------------------------------------------------------------

    def commit(self):
        """Commit any pending transaction. No-op — underlying API has no transactions."""
        if self.is_open is False:
            raise InterfaceError("Cannot commit because the connection is already closed.")

    def rollback(self):
        """Rollback any pending transaction. No-op — underlying API has no transactions."""
        if self.is_open is False:
            raise InterfaceError("Cannot rollback because the connection is already closed.")

    def close(self):
        """Mark the connection closed; subsequent operations raise InterfaceError."""
        if self.is_open is False:
            raise InterfaceError("The connection is already closed")
        self.is_open = False

    def cursor(self):
        """Return a new Cursor bound to this connection."""
        if self.is_open is False:
            raise InterfaceError("Cannot create cursor: connection is closed")
        return Cursor(self)
|
cdata_connect/cursor.py
ADDED
|
@@ -0,0 +1,366 @@
|
|
|
1
|
+
import time
|
|
2
|
+
import typing
|
|
3
|
+
from urllib.parse import quote
|
|
4
|
+
|
|
5
|
+
import ijson
|
|
6
|
+
import requests
|
|
7
|
+
|
|
8
|
+
from .log import logger
|
|
9
|
+
from .exceptions import (
|
|
10
|
+
InterfaceError, OperationalError, DataError, DatabaseError,
|
|
11
|
+
ProgrammingError,
|
|
12
|
+
)
|
|
13
|
+
from .util.types import (
|
|
14
|
+
TYPE_CONNECT_TO_PYTHON, TYPE_PYTHON_TO_CONNECT,
|
|
15
|
+
type_codes, convert_to_python_type,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class Cursor:
    """
    DB API 2.0 (PEP 249) cursor initialized by the Connection class.

    Result sets are streamed: the HTTP response is parsed incrementally with
    ijson, so fetch calls pull rows off the wire on demand instead of
    buffering the whole payload in memory.

    :param connection: The parent Connection instance.
    """

    def __init__(self, connection):
        self.connection = connection
        self.schema = None           # per-statement list of column schema dicts
        self.rows_generator = None   # ijson generator yielding raw row lists
        self.response = None         # current streaming requests.Response
        self.json_reader = None      # ijson event parser over response.raw
        self.current_row = 0
        self._rowcount = -1          # -1 until known, per PEP 249

    # ------------------------------------------------------------------
    # Context manager & iterator protocol (PEP 249 optional extensions)
    # ------------------------------------------------------------------

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Release resources; never suppress the in-flight exception.
        self.close()
        return False

    def __iter__(self):
        return self

    def __next__(self):
        row = self.fetchone()
        if row is None:
            raise StopIteration
        return row

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------

    def _check_connection(self):
        """Raise InterfaceError if this cursor or its connection is closed."""
        if self.connection is None:
            raise InterfaceError("Cursor is closed")
        if not self.connection.is_open:
            raise InterfaceError("Operation on closed connection is not allowed")

    def _execute_request(self, url: str, json_object: dict) -> None:
        """Send an HTTP POST to the Connect API with timeout and retry.

        Transient 5xx responses and connection errors are retried with
        exponential backoff (retry_delay * 2**attempt). On a 200 response
        the streaming JSON reader, schema, and rows generator are prepared.

        :raises OperationalError: on HTTP failures, timeouts, exhausted retries.
        :raises DataError: when the response body is not valid JSON.
        :raises DatabaseError: for any unexpected failure.
        """
        self._check_connection()

        max_retries = self.connection.max_retries
        retry_delay = self.connection.retry_delay
        timeout = self.connection.timeout

        last_exception = None
        for attempt in range(max_retries + 1):
            try:
                # Close any previous response before retrying to avoid resource leaks
                self._close_response()

                logger.debug(
                    f"Cursor - Sending API request to {url} (attempt {attempt + 1})"
                )
                self.response = requests.post(
                    url,
                    auth=self.connection.auth,
                    json=json_object,
                    stream=True,
                    timeout=timeout,
                )

                if self.response.status_code == 200:
                    self.json_reader = ijson.parse(self.response.raw)
                    self._process_schema()
                    self._prepare_rows_reader()
                    self._rowcount = -1
                    return

                # Transient server errors — eligible for retry
                if self.response.status_code in (500, 502, 503, 504):
                    if attempt < max_retries:
                        wait = retry_delay * (2 ** attempt)
                        logger.warning(
                            f"Cursor - Transient server error "
                            f"{self.response.status_code}, retrying in "
                            f"{wait:.1f}s (attempt {attempt + 1}/{max_retries})"
                        )
                        time.sleep(wait)
                        continue

                # Non-retryable error (4xx, or exhausted retries on 5xx)
                error_string = (
                    f"Cursor - API request failed with status code "
                    f"{self.response.status_code}: {self.response.text}"
                )
                logger.error(error_string)
                raise OperationalError(error_string)

            except requests.exceptions.Timeout as e:
                raise OperationalError(f"Request timed out: {str(e)}") from e

            except requests.exceptions.ConnectionError as e:
                last_exception = e
                if attempt < max_retries:
                    wait = retry_delay * (2 ** attempt)
                    logger.warning(
                        f"Cursor - Connection error, retrying in {wait:.1f}s: {e}"
                    )
                    time.sleep(wait)
                    continue
                break

            except requests.exceptions.RequestException as e:
                raise OperationalError(f"Error executing query: {str(e)}") from e

            except ijson.JSONError as e:
                raise DataError(f"Error parsing JSON response: {str(e)}") from e

            except (OperationalError, InterfaceError, ProgrammingError, DataError,
                    DatabaseError):
                raise  # re-raise our own exceptions without wrapping

            except Exception as e:
                raise DatabaseError(f"Unexpected error occurred: {str(e)}") from e

        # Only reachable after exhausting retries on connection errors.
        raise OperationalError(
            f"Request failed after {max_retries} retries: {str(last_exception)}"
        )

    def _process_schema(self) -> None:
        """Consume ijson events up to the rows array, collecting column schema."""
        self.schema = []
        key = ''
        for prefix, event, value in self.json_reader:
            if (prefix, event) == ('results.item.schema.item', 'start_map'):
                key = ''
                current_schema_item = {}
            elif (prefix, event) == ('results.item.schema.item', 'end_map'):
                self.schema.append(current_schema_item)
            elif (prefix, event) == ('results.item.schema.item', 'map_key'):
                key = value
            elif prefix == 'results.item.schema.item.%s' % key:
                current_schema_item[key] = value
            elif prefix == 'results.item.rows':
                # Stop here so the rows reader can take over the event stream.
                break

    def _prepare_rows_reader(self) -> None:
        """Attach an ijson items generator over the result rows, if any."""
        if not self.schema:
            # Non-row-returning statement (e.g. DELETE, INSERT, CREATE).
            # Leave rows_generator as None so fetch calls raise ProgrammingError.
            self.rows_generator = None
            return
        self.rows_generator = ijson.items(self.json_reader,
                                          'results.item.rows.item')

    def _convert_row(self, row: list) -> list:
        """Convert each raw JSON cell to its Python type per the schema."""
        data_type_names = [schema_item['dataTypeName'] for schema_item in self.schema]
        return [
            convert_to_python_type(value, data_type_name)
            for data_type_name, value in zip(data_type_names, row)
        ]

    def _build_url(self, endpoint: str) -> str:
        """Build API URL with optional workspace query parameter."""
        url = f"{self.connection.base_url}/{endpoint}"
        if self.connection.workspace:
            url += f"?workspace={quote(self.connection.workspace)}"
        return url

    # ------------------------------------------------------------------
    # DB API 2.0 properties
    # ------------------------------------------------------------------

    @property
    def description(self) -> typing.Optional[typing.List[typing.Tuple]]:
        """PEP 249 7-tuples (name, type_code, display_size, internal_size,
        precision, scale, null_ok) per result column; None if no result set."""
        if not self.schema:
            return None
        columns = []
        for column in self.schema:
            col_name = column["columnName"]
            columns.append((
                col_name,
                TYPE_CONNECT_TO_PYTHON[column["dataType"]],
                None,  # display_size (not used)
                column.get("length"),
                column.get("precision"),
                column.get("scale"),
                column.get("nullable"),
            ))
        return columns

    @property
    def rowcount(self) -> int:
        """Row count of the last operation, or -1 when unknown (PEP 249)."""
        return self._rowcount

    @property
    def arraysize(self) -> int:
        """Default number of rows fetchmany() returns. Defaults to 1."""
        return getattr(self, '_arraysize', 1)

    @arraysize.setter
    def arraysize(self, value: int) -> None:
        self._arraysize = value

    # ------------------------------------------------------------------
    # DB API 2.0 methods
    # ------------------------------------------------------------------

    def _close_response(self) -> None:
        """Drain and close the current HTTP streaming response, if any."""
        if self.response is not None:
            try:
                self.response.close()
            except Exception:
                pass
            self.response = None
        # Drop the ijson reader so it releases the socket reference too.
        self.json_reader = None

    def execute(self, query: str, params: typing.Optional[dict] = None) -> None:
        """Execute a single SQL statement, optionally with pyformat params."""
        self._check_connection()
        self._rowcount = -1
        self._close_response()
        self.rows_generator = None
        self.schema = None

        if params is not None:
            if not isinstance(params, dict):
                raise ProgrammingError("params must be a dictionary")
            # Apply pyformat substitution; params are sent as part of the query string.
            # NOTE: server-side parameterization via the API 'parameters' field is
            # preferred for production workloads. This keeps backward compatibility.
            query = query % params

        json_object = {"query": query}
        self._execute_request(self._build_url("query"), json_object)

    def executemany(self, query: str,
                    params: typing.Optional[list] = None) -> None:
        """Execute a statement once per parameter set via the batch endpoint."""
        self._check_connection()
        # Reset state exactly like execute()/callproc(). The rowcount reset
        # was previously missing here, leaking a stale value from the prior
        # statement.
        self._rowcount = -1
        self._close_response()
        self.rows_generator = None
        self.schema = None

        json_object = {"query": query}

        if params is not None:
            if not isinstance(params, list):
                raise ProgrammingError("params must be a list of dictionaries")

            parameter_list = []
            for param_dict in params:
                parameter_item = {}
                for key, value in param_dict.items():
                    data_type = type(value)
                    data_type_code = type_codes.get(data_type, 9)
                    data_type_connect = TYPE_PYTHON_TO_CONNECT[data_type_code]
                    parameter_item[key] = {
                        "dataType": data_type_connect,
                        "value": value,
                    }
                parameter_list.append(parameter_item)

            json_object["parameters"] = parameter_list

        self._execute_request(self._build_url("batch"), json_object)

    def callproc(self, procedure: str,
                 params: typing.Optional[tuple] = None) -> tuple:
        """Call a stored procedure; returns the input parameters unchanged."""
        self._check_connection()
        self._rowcount = -1
        self._close_response()
        self.rows_generator = None
        self.schema = None

        if params is None:
            args = []
        elif isinstance(params, tuple):
            args = list(params)
        else:
            args = [params]

        json_object = {
            "procedure": procedure,
            "parameters": {},
        }

        # Procedure parameters are positional: @1, @2, ...
        for key, value in enumerate(args, start=1):
            data_type = type(value)
            data_type_code = type_codes.get(data_type, 9)
            data_type_connect = TYPE_PYTHON_TO_CONNECT[data_type_code]
            json_object["parameters"][f"@{key}"] = {
                "dataType": data_type_connect,
                "value": value,
            }

        self._execute_request(self._build_url("exec"), json_object)
        return tuple(args)

    def close(self) -> None:
        """Close the cursor and release its HTTP resources."""
        # Allow closing even if the connection is already closed —
        # the cursor should still be able to release its own resources.
        self._close_response()
        self.rows_generator = None
        self.connection = None

    def fetchone(self) -> typing.Optional[list]:
        """Return the next row, or None when the result set is exhausted."""
        self._check_connection()
        if self.rows_generator is None:
            raise ProgrammingError(
                "fetchone() called before execute() or on a non-row-returning statement"
            )
        try:
            current_row = next(self.rows_generator)
            return self._convert_row(current_row)
        except StopIteration:
            return None

    def fetchall(self) -> list:
        """Return all remaining rows as a list."""
        self._check_connection()
        if self.rows_generator is None:
            raise ProgrammingError(
                "fetchall() called before execute() or on a non-row-returning statement"
            )
        processed_rows = []
        for row in self.rows_generator:
            processed_rows.append(self._convert_row(row))
        return processed_rows

    def fetchmany(self, size: typing.Optional[int] = None) -> list:
        """Return up to ``size`` rows (default: self.arraysize)."""
        self._check_connection()
        if self.rows_generator is None:
            raise ProgrammingError(
                "fetchmany() called before execute() or on a non-row-returning statement"
            )
        if size is None:
            size = self.arraysize
        rows = []
        for _ in range(size):
            row = self.fetchone()
            if row is None:
                break
            rows.append(row)
        return rows

    def setinputsizes(self, sizes) -> None:
        """DB API 2.0 optional method — no-op for this driver."""

    def setoutputsize(self, size, column=None) -> None:
        """DB API 2.0 optional method — no-op for this driver."""
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
class Warning(Exception):
    """Raised for important warnings (PEP 249)."""


class Error(Exception):
    """Base class of all other error exceptions in this module (PEP 249)."""


class InterfaceError(Error):
    """Raised for errors related to the database interface itself."""


class DatabaseError(Error):
    """Raised for errors related to the database."""


class DataError(DatabaseError):
    """Raised for problems with the processed data (e.g. bad values)."""


class OperationalError(DatabaseError):
    """Raised for errors in the database's operation, e.g. I/O failures."""


class IntegrityError(DatabaseError):
    """Raised when the relational integrity of the database is affected."""


class InternalError(DatabaseError):
    """Raised when the database encounters an internal error."""


class ProgrammingError(DatabaseError):
    """Raised for programming errors, e.g. bad SQL or API misuse."""


class NotSupportedError(DatabaseError):
    """Raised when an unsupported method or API feature is used."""


class ConfigurationError(Error):
    """Driver-specific: raised when configuration is missing or invalid."""
|
cdata_connect/log.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
# A library must leave logging configuration to the embedding application,
# so basicConfig is deliberately not called here. Attaching a NullHandler
# silences the "No handler found" warning when the host app has not set up
# logging at all.
logger = logging.getLogger("cdata_connect")
logger.addHandler(logging.NullHandler())
|
|
File without changes
|
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import time
|
|
3
|
+
import types
|
|
4
|
+
import uuid
|
|
5
|
+
from collections import defaultdict
|
|
6
|
+
from decimal import Decimal
|
|
7
|
+
from typing import Any, DefaultDict, NamedTuple
|
|
8
|
+
|
|
9
|
+
# DB API 2.0 type-object codes exposed by this driver (see PEP 249).
DBAPI_TYPE_STRING = 0
DBAPI_TYPE_BINARY = 1
DBAPI_TYPE_NUMBER = 2
DBAPI_TYPE_TIMESTAMP = 3

# Python type -> DB API type code, used when serializing query parameters.
type_codes = {
    str: DBAPI_TYPE_STRING,
    bool: DBAPI_TYPE_NUMBER,
    int: DBAPI_TYPE_NUMBER,
    float: DBAPI_TYPE_NUMBER,
    Decimal: DBAPI_TYPE_NUMBER,
    datetime.datetime: DBAPI_TYPE_TIMESTAMP,
    datetime.date: DBAPI_TYPE_TIMESTAMP,
    datetime.time: DBAPI_TYPE_TIMESTAMP,
    bytes: DBAPI_TYPE_BINARY,
    bytearray: DBAPI_TYPE_BINARY,
    # type(None) is portable across all supported Python versions, unlike
    # types.NoneType which only exists on Python 3.10+.
    type(None): 9,  # sentinel code for NULL parameters
}

# CData Connect wire-protocol data-type codes.
CONNECT_TYPE_BINARY = 1
CONNECT_TYPE_VARCHAR = 5
CONNECT_TYPE_TINYINT = 6
CONNECT_TYPE_SMALLINT = 7
CONNECT_TYPE_INTEGER = 8
CONNECT_TYPE_BIGINT = 9
CONNECT_TYPE_FLOAT = 10
CONNECT_TYPE_DOUBLE = 11
CONNECT_TYPE_DECIMAL = 12
CONNECT_TYPE_NUMERIC = 13
CONNECT_TYPE_BOOLEAN = 14
CONNECT_TYPE_DATE = 15
CONNECT_TYPE_TIME = 16
CONNECT_TYPE_TIMESTAMP = 17
CONNECT_TYPE_UUID = 18
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class FieldType(NamedTuple):
    """Pairs a CData Connect wire type code with its DB API type code."""

    connect_type: int  # CONNECT_TYPE_* code used by the HTTP API
    dbapi_type: int    # DBAPI_TYPE_* code exposed to PEP 249 callers
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def convert_to_python_type(value, data_type):
    """
    Convert a JSON-decoded cell *value* to a Python object based on the
    column's Connect ``data_type`` name.

    ``None`` always passes through unchanged, and values that already have
    the target Python type are returned as-is.

    :param value: Raw value decoded from the API's JSON response.
    :param data_type: Column dataTypeName string (e.g. ``'INTEGER'``).
    :return: The value coerced to the matching Python type. Unknown type
        names — and unparseable date/time strings — are returned unchanged
        rather than raising.
    """
    if data_type in ('TINYINT', 'SMALLINT', 'INTEGER', 'BIGINT', 'INT'):
        return value if value is None or isinstance(value, int) else int(str(value))
    elif data_type in ('FLOAT', 'DOUBLE', 'REAL'):
        return value if value is None or isinstance(value, float) else float(str(value))
    elif data_type in ('DECIMAL', 'NUMERIC'):
        # Go through str() so float inputs don't carry binary representation error.
        return value if value is None or isinstance(value, Decimal) else Decimal(str(value))
    elif data_type in ('BOOLEAN', 'BOOL'):
        return value if value is None or isinstance(value, bool) else str(value).lower() in ('true', '1')
    elif data_type == 'DATE':
        return value if value is None or isinstance(value, datetime.date) else datetime.datetime.strptime(value, '%Y-%m-%d').date()
    elif data_type == 'TIME':
        if value is None or isinstance(value, datetime.time):
            return value
        # Accept times with and without fractional seconds.
        for fmt in ('%H:%M:%S', '%H:%M:%S.%f'):
            try:
                return datetime.datetime.strptime(value, fmt).time()
            except ValueError:
                continue
        return value
    elif data_type in ('TIMESTAMP', 'DATETIME'):
        if value is None or isinstance(value, datetime.datetime):
            return value
        # Accept common ISO-8601 variants, with and without fractional seconds.
        for fmt in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
                    '%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%S.%f',
                    '%Y-%m-%d %H:%M:%S.%f'):
            try:
                return datetime.datetime.strptime(value, fmt)
            except ValueError:
                continue
        # Unparseable timestamp strings pass through unchanged.
        return value
    elif data_type == 'UUID':
        return value if value is None else uuid.UUID(value)
    elif data_type == 'BINARY':
        if value is None:
            return None
        if isinstance(value, (bytes, bytearray)):
            return bytearray(value)
        if isinstance(value, str):
            # The API transports binary columns as base64 text.
            import base64
            return bytearray(base64.b64decode(value))
        return bytearray(value)
    elif data_type in ('VARCHAR', 'STRING', 'TEXT', 'CHAR', 'NVARCHAR', 'NCHAR'):
        return value if value is None or isinstance(value, str) else str(value)
    elif data_type == 'NULL':
        return None
    else:
        # Unknown type — return value as-is rather than crashing
        return value
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
# One entry per Connect wire type, paired with its DB API type code.
FIELD_TYPES: tuple[FieldType, ...] = (
    FieldType(connect_type=CONNECT_TYPE_BINARY, dbapi_type=DBAPI_TYPE_BINARY),
    FieldType(connect_type=CONNECT_TYPE_VARCHAR, dbapi_type=DBAPI_TYPE_STRING),
    FieldType(connect_type=CONNECT_TYPE_TINYINT, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_SMALLINT, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_INTEGER, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_BIGINT, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_FLOAT, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_DOUBLE, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_DECIMAL, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_NUMERIC, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_BOOLEAN, dbapi_type=DBAPI_TYPE_NUMBER),
    FieldType(connect_type=CONNECT_TYPE_DATE, dbapi_type=DBAPI_TYPE_TIMESTAMP),
    FieldType(connect_type=CONNECT_TYPE_TIME, dbapi_type=DBAPI_TYPE_TIMESTAMP),
    FieldType(connect_type=CONNECT_TYPE_TIMESTAMP, dbapi_type=DBAPI_TYPE_TIMESTAMP),
    FieldType(connect_type=CONNECT_TYPE_UUID, dbapi_type=DBAPI_TYPE_STRING),
)

# Connect wire type code -> DB API type code. Unknown codes fall back to
# 0 (STRING) via defaultdict(int).
TYPE_CONNECT_TO_PYTHON: DefaultDict[Any, int] = defaultdict(int)
for field_type in FIELD_TYPES:  # the original enumerate() index was unused
    TYPE_CONNECT_TO_PYTHON[field_type.connect_type] = field_type.dbapi_type

# DB API type code -> Connect wire type code. Mapped values are ints, but
# the default factory is str, so an unmapped code (e.g. the NULL sentinel 9)
# yields '' — presumably treated as "untyped" by the API; annotate as Any
# rather than the original (incorrect) str.
TYPE_PYTHON_TO_CONNECT: DefaultDict[int, Any] = defaultdict(str)
TYPE_PYTHON_TO_CONNECT[DBAPI_TYPE_STRING] = CONNECT_TYPE_VARCHAR
TYPE_PYTHON_TO_CONNECT[DBAPI_TYPE_NUMBER] = CONNECT_TYPE_INTEGER
TYPE_PYTHON_TO_CONNECT[DBAPI_TYPE_TIMESTAMP] = CONNECT_TYPE_TIMESTAMP
TYPE_PYTHON_TO_CONNECT[DBAPI_TYPE_BINARY] = CONNECT_TYPE_BINARY
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
class Date:
    """DB-API Date container; ``str()`` yields ISO ``YYYY-MM-DD``."""

    def __init__(self, year, month, day):
        self.year = year
        self.month = month
        self.day = day

    def __str__(self):
        # Zero-pad components to the fixed ISO-8601 widths.
        return "%04d-%02d-%02d" % (self.year, self.month, self.day)

    @classmethod
    def fromtimestamp(cls, timestamp):
        """Alternate constructor from seconds since the Unix epoch.

        Uses ``datetime.date.fromtimestamp``, i.e. the platform's local
        time zone, matching the PEP 249 ``DateFromTicks`` convention.
        """
        d = datetime.date.fromtimestamp(timestamp)
        return cls(d.year, d.month, d.day)
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
class Time:
    """DB-API Time container; ``str()`` yields ``HH:MM:SS``."""

    def __init__(self, hour, minute, second):
        self.hour = hour
        self.minute = minute
        self.second = second

    def __str__(self):
        # Two-digit zero-padded fields, colon separated.
        return "%02d:%02d:%02d" % (self.hour, self.minute, self.second)

    @classmethod
    def fromtimestamp(cls, timestamp):
        """Alternate constructor from an epoch timestamp.

        Converts via ``datetime.datetime.fromtimestamp`` (local time) and
        keeps only hour/minute/second — any fractional second is dropped.
        """
        moment = datetime.datetime.fromtimestamp(timestamp)
        return cls(moment.hour, moment.minute, moment.second)
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
class Timestamp:
    """DB-API Timestamp container; ``str()`` yields ``YYYY-MM-DD HH:MM:SS``."""

    def __init__(self, year, month, day, hour, minute, second):
        self.year = year
        self.month = month
        self.day = day
        self.hour = hour
        self.minute = minute
        self.second = second

    def __str__(self):
        # Build the date and time halves separately, then join with a space.
        date_part = "%04d-%02d-%02d" % (self.year, self.month, self.day)
        time_part = "%02d:%02d:%02d" % (self.hour, self.minute, self.second)
        return date_part + " " + time_part

    @classmethod
    def fromtimestamp(cls, timestamp):
        """Alternate constructor from an epoch timestamp.

        Uses ``datetime.datetime.fromtimestamp`` (local time); microseconds
        are discarded since this type only models whole seconds.
        """
        moment = datetime.datetime.fromtimestamp(timestamp)
        return cls(
            moment.year,
            moment.month,
            moment.day,
            moment.hour,
            moment.minute,
            moment.second,
        )
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def DateFromTicks(ticks):
    """PEP 249 module constructor: build a :class:`Date` from seconds since the epoch."""
    return Date.fromtimestamp(ticks)
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def TimeFromTicks(ticks):
    """PEP 249 module constructor: build a :class:`Time` from seconds since the epoch."""
    return Time.fromtimestamp(ticks)
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
def TimestampFromTicks(ticks):
    """PEP 249 module constructor: build a :class:`Timestamp` from seconds since the epoch."""
    return Timestamp.fromtimestamp(ticks)
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
class Binary:
    """PEP 249 ``Binary`` constructor type.

    Wraps a binary payload so downstream code can distinguish it from a
    plain string parameter. The payload is stored as given — no copy,
    encoding, or validation happens here.
    """

    def __init__(self, data):
        # data: presumably a bytes-like object (bytes/bytearray), but
        # nothing here enforces that — TODO confirm at call sites.
        self.data = data
|
cdata_connect/version.py
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: cdata-connect
|
|
3
|
+
Version: 0.0.1.dev1
|
|
4
|
+
Summary: CData Connect Python DB-API 2.0 interface library
|
|
5
|
+
Author-email: "CData Software, Inc." <support@cdata.com>
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: Homepage, https://www.cdata.com/connect/
|
|
8
|
+
Project-URL: Repository, https://github.com/CDataSoftware/cloud-sdk-python
|
|
9
|
+
Project-URL: Bug Tracker, https://github.com/CDataSoftware/cloud-sdk-python/issues
|
|
10
|
+
Keywords: cdata,dbapi,database,connector
|
|
11
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: Natural Language :: English
|
|
14
|
+
Classifier: Operating System :: OS Independent
|
|
15
|
+
Classifier: Programming Language :: Python
|
|
16
|
+
Classifier: Programming Language :: Python :: 3
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
20
|
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
21
|
+
Classifier: Topic :: Database :: Front-Ends
|
|
22
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
23
|
+
Requires-Python: >=3.10
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
License-File: LICENSE
|
|
26
|
+
Requires-Dist: requests>=2.28.0
|
|
27
|
+
Requires-Dist: ijson>=3.1.0
|
|
28
|
+
Requires-Dist: pyhocon>=0.3.60
|
|
29
|
+
Provides-Extra: full
|
|
30
|
+
Requires-Dist: pandas>=1.3.0; extra == "full"
|
|
31
|
+
Provides-Extra: dev
|
|
32
|
+
Requires-Dist: pytest>=7.0; extra == "dev"
|
|
33
|
+
Requires-Dist: pytest-mock>=3.10; extra == "dev"
|
|
34
|
+
Requires-Dist: requests; extra == "dev"
|
|
35
|
+
Dynamic: license-file
|
|
36
|
+
|
|
37
|
+
# CData Connect AI Python Connector
|
|
38
|
+
|
|
39
|
+
A Python [DB-API 2.0 (PEP 249)](https://peps.python.org/pep-0249/) compliant connector for [CData Connect AI](https://www.cdata.com/connect/). Query any data source connected through Connect AI using the standard Python database interface.
|
|
40
|
+
|
|
41
|
+
## Installation
|
|
42
|
+
|
|
43
|
+
```bash
|
|
44
|
+
pip install cdata-connect
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
For pandas integration:
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
pip install "cdata-connect[full]"
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
## Quick Start
|
|
54
|
+
|
|
55
|
+
```python
|
|
56
|
+
import cdata_connect
|
|
57
|
+
|
|
58
|
+
conn = cdata_connect.connect(
|
|
59
|
+
base_url="https://cloud.cdata.com/api/",
|
|
60
|
+
username="you@example.com",
|
|
61
|
+
password="<your_personal_access_token>",
|
|
62
|
+
)
|
|
63
|
+
|
|
64
|
+
cursor = conn.cursor()
|
|
65
|
+
cursor.execute("SELECT * FROM [Salesforce1].[Salesforce].[Account]")
|
|
66
|
+
rows = cursor.fetchall()
|
|
67
|
+
for row in rows:
|
|
68
|
+
print(row)
|
|
69
|
+
|
|
70
|
+
conn.close()
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
## Configuration File
|
|
74
|
+
|
|
75
|
+
Use a [PyHOCON](https://github.com/chimpler/pyhocon) config file to keep credentials out of code:
|
|
76
|
+
|
|
77
|
+
```hocon
|
|
78
|
+
# config.conf
|
|
79
|
+
cdata_api_db {
|
|
80
|
+
base_url = "https://cloud.cdata.com/api/"
|
|
81
|
+
username = "you@example.com"
|
|
82
|
+
password = "<your_personal_access_token>"
|
|
83
|
+
}
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
```python
|
|
87
|
+
conn = cdata_connect.connect(config_path="config.conf")
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
## Parameterized Queries
|
|
91
|
+
|
|
92
|
+
```python
|
|
93
|
+
cursor.execute(
|
|
94
|
+
"SELECT * FROM [DB].[public].[users] WHERE city = %(city)s LIMIT %(limit)s",
|
|
95
|
+
{"city": "New York", "limit": 10},
|
|
96
|
+
)
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
## Batch Operations
|
|
100
|
+
|
|
101
|
+
```python
|
|
102
|
+
cursor.executemany(
|
|
103
|
+
"INSERT INTO [DB].[public].[cities] (city, id) VALUES (@city, @id)",
|
|
104
|
+
[
|
|
105
|
+
{"@city": {"dataType": 5, "value": "New York"}, "@id": {"dataType": 8, "value": 1}},
|
|
106
|
+
{"@city": {"dataType": 5, "value": "London"}, "@id": {"dataType": 8, "value": 2}},
|
|
107
|
+
],
|
|
108
|
+
)
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
## Stored Procedures
|
|
112
|
+
|
|
113
|
+
```python
|
|
114
|
+
cursor.callproc("[DB].[public].[my_procedure]", ("arg1", "arg2"))
|
|
115
|
+
rows = cursor.fetchall()
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
## Connection Options
|
|
119
|
+
|
|
120
|
+
| Parameter | Description | Default |
|
|
121
|
+
|-----------|-------------|---------|
|
|
122
|
+
| `base_url` | Connect AI API base URL | — |
|
|
123
|
+
| `username` | Authentication username | — |
|
|
124
|
+
| `password` | Personal access token | — |
|
|
125
|
+
| `config_path` | Path to PyHOCON config file | — |
|
|
126
|
+
| `workspace` | Connect AI workspace name | — |
|
|
127
|
+
| `timeout` | HTTP request timeout (seconds) | `30` |
|
|
128
|
+
| `max_retries` | Retries on transient 5xx errors | `3` |
|
|
129
|
+
| `retry_delay` | Base delay between retries (seconds) | `1.0` |
|
|
130
|
+
|
|
131
|
+
## DB-API 2.0 Compliance
|
|
132
|
+
|
|
133
|
+
| Attribute | Value |
|
|
134
|
+
|-----------|-------|
|
|
135
|
+
| `apilevel` | `"2.0"` |
|
|
136
|
+
| `threadsafety` | `3` |
|
|
137
|
+
| `paramstyle` | `"pyformat"` |
|
|
138
|
+
|
|
139
|
+
**Supported methods:** `connect()`, `cursor()`, `execute()`, `executemany()`, `callproc()`, `fetchone()`, `fetchmany()`, `fetchall()`, `close()`, `commit()`, `rollback()`
|
|
140
|
+
|
|
141
|
+
## Exception Hierarchy
|
|
142
|
+
|
|
143
|
+
```
|
|
144
|
+
cdata_connect.Error
|
|
145
|
+
├── InterfaceError
|
|
146
|
+
└── DatabaseError
|
|
147
|
+
├── DataError
|
|
148
|
+
├── OperationalError
|
|
149
|
+
├── IntegrityError
|
|
150
|
+
├── InternalError
|
|
151
|
+
├── ProgrammingError
|
|
152
|
+
└── NotSupportedError
|
|
153
|
+
```
|
|
154
|
+
|
|
155
|
+
## Requirements
|
|
156
|
+
|
|
157
|
+
- Python >= 3.10
|
|
158
|
+
- `requests >= 2.28.0`
|
|
159
|
+
- `ijson >= 3.1.0`
|
|
160
|
+
- `pyhocon >= 0.3.60`
|
|
161
|
+
|
|
162
|
+
## Running Tests
|
|
163
|
+
|
|
164
|
+
Tests are split into **unit** (no server) and **integration** (mock server auto-starts).
|
|
165
|
+
|
|
166
|
+
```bash
|
|
167
|
+
pip install -e ".[dev]"
|
|
168
|
+
|
|
169
|
+
# Unit tests only — fast, no server needed
|
|
170
|
+
pytest tests/unit/ -v
|
|
171
|
+
|
|
172
|
+
# Integration tests — mock server auto-starts on localhost
|
|
173
|
+
pytest tests/integration/ -v
|
|
174
|
+
|
|
175
|
+
# All tests
|
|
176
|
+
pytest tests/ -v
|
|
177
|
+
|
|
178
|
+
# Run against a live Connect AI endpoint
|
|
179
|
+
CDATA_BASE_URL=https://cloud.cdata.com/api \
|
|
180
|
+
CDATA_USERNAME=you@example.com \
|
|
181
|
+
CDATA_PASSWORD=<pat> \
|
|
182
|
+
SKIP_LIVE_TESTS=0 \
|
|
183
|
+
pytest tests/integration/ -v
|
|
184
|
+
```
|
|
185
|
+
|
|
186
|
+
See [`tests/README.md`](tests/README.md) for test organization details.
|
|
187
|
+
|
|
188
|
+
### Test Environment Variables
|
|
189
|
+
|
|
190
|
+
| Variable | Description | Default |
|
|
191
|
+
|----------|-------------|---------|
|
|
192
|
+
| `CDATA_BASE_URL` | API endpoint | `http://localhost:8080/api` |
|
|
193
|
+
| `CDATA_USERNAME` | Auth username | `test@example.com` |
|
|
194
|
+
| `CDATA_PASSWORD` | Auth password / PAT | `any_token` |
|
|
195
|
+
| `MOCK_PORT` | Mock server port | `8080` |
|
|
196
|
+
| `MOCK_SERVER_DIR` | Path to mock server | `../connect-ai-mock` |
|
|
197
|
+
| `SKIP_LIVE_TESTS` | Skip live API tests | `1` |
|
|
198
|
+
|
|
199
|
+
## Building from Source
|
|
200
|
+
|
|
201
|
+
Install the build tool:
|
|
202
|
+
|
|
203
|
+
```bash
|
|
204
|
+
pip install build
|
|
205
|
+
```
|
|
206
|
+
|
|
207
|
+
Build both the wheel and source distribution from the `connector/` directory:
|
|
208
|
+
|
|
209
|
+
```bash
|
|
210
|
+
cd connector
|
|
211
|
+
python -m build
|
|
212
|
+
```
|
|
213
|
+
|
|
214
|
+
Artifacts are written to `connector/dist/`:
|
|
215
|
+
|
|
216
|
+
```
|
|
217
|
+
dist/
|
|
218
|
+
├── cdata_connect-<version>-py3-none-any.whl # Wheel (preferred for install)
|
|
219
|
+
└── cdata_connect-<version>.tar.gz # Source distribution
|
|
220
|
+
```
|
|
221
|
+
|
|
222
|
+
Install the locally built wheel:
|
|
223
|
+
|
|
224
|
+
```bash
|
|
225
|
+
pip install dist/cdata_connect-<version>-py3-none-any.whl
|
|
226
|
+
```
|
|
227
|
+
|
|
228
|
+
### Publishing to PyPI
|
|
229
|
+
|
|
230
|
+
```bash
|
|
231
|
+
pip install twine
|
|
232
|
+
|
|
233
|
+
# Verify the package metadata before uploading
|
|
234
|
+
twine check dist/*
|
|
235
|
+
|
|
236
|
+
# Upload to TestPyPI first (recommended for a dry run)
|
|
237
|
+
twine upload --repository testpypi dist/*
|
|
238
|
+
|
|
239
|
+
# Upload to PyPI
|
|
240
|
+
twine upload dist/*
|
|
241
|
+
```
|
|
242
|
+
|
|
243
|
+
Store your PyPI credentials in `~/.pypirc` or pass them as environment variables:
|
|
244
|
+
|
|
245
|
+
```ini
|
|
246
|
+
# ~/.pypirc
|
|
247
|
+
[pypi]
|
|
248
|
+
username = __token__
|
|
249
|
+
password = pypi-<your-api-token>
|
|
250
|
+
```
|
|
251
|
+
|
|
252
|
+
## Demo Client
|
|
253
|
+
|
|
254
|
+
A demo script at the repo root (`client_demo.py`) exercises the connector end-to-end against the mock server. It covers:
|
|
255
|
+
|
|
256
|
+
- `SELECT` with `fetchall`, `fetchone`, and `fetchmany`
|
|
257
|
+
- `cursor.description` (column metadata)
|
|
258
|
+
- Parameterized queries (`pyformat`)
|
|
259
|
+
- Batch `INSERT` via `executemany` and `SELECT` to verify
|
|
260
|
+
- `DELETE` and confirm empty result
|
|
261
|
+
- Stored procedure via `callproc`
|
|
262
|
+
- Error handling (`OperationalError` on bad host)
|
|
263
|
+
|
|
264
|
+
**Step 1 — Start the mock server** (in one terminal):
|
|
265
|
+
|
|
266
|
+
```bash
|
|
267
|
+
cd connect-ai-mock
|
|
268
|
+
pip install -r requirements.txt
|
|
269
|
+
python run.py
|
|
270
|
+
# Server ready at http://localhost:8080
|
|
271
|
+
```
|
|
272
|
+
|
|
273
|
+
**Step 2 — Install the connector and run the demo** (in another terminal):
|
|
274
|
+
|
|
275
|
+
```bash
|
|
276
|
+
# Install from PyPI
|
|
277
|
+
pip install cdata-connect
|
|
278
|
+
|
|
279
|
+
# OR install the locally built wheel
|
|
280
|
+
pip install connector/dist/cdata_connect-<version>-py3-none-any.whl
|
|
281
|
+
|
|
282
|
+
# Run
|
|
283
|
+
python client_demo.py
|
|
284
|
+
```
|
|
285
|
+
|
|
286
|
+
Expected output covers all 9 demo sections and ends with `All done.`
|
|
287
|
+
|
|
288
|
+
## License
|
|
289
|
+
|
|
290
|
+
MIT — see [LICENSE](LICENSE)
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
cdata_connect/LICENSE,sha256=eoXgG4BOZBC6mSs-8kSD8QBpg3hKdQpuMt4UDCuHE2A,1077
|
|
2
|
+
cdata_connect/__init__.py,sha256=0KepkIKr-l6-rvT3Tm5FG3wabRgSDC4cDMaIUda_xEw,2335
|
|
3
|
+
cdata_connect/connection.py,sha256=ZZZk65ZWWHlyUXZsaPzpi39496gYoJqCzAq0qoscUTo,5027
|
|
4
|
+
cdata_connect/cursor.py,sha256=aNhUp2sgzfhBbMOR1GOKvPyKHB0OYQL07vubTKLsfBA,13196
|
|
5
|
+
cdata_connect/exceptions.py,sha256=ybGhHBIv0IlY-04MC9xR3cxNyPeBvRLGhATSHu22v1g,482
|
|
6
|
+
cdata_connect/log.py,sha256=b9RLOEu8I8x6kOBeB96MsmU9TqQWQcw5BhdAXgWHpHA,300
|
|
7
|
+
cdata_connect/version.py,sha256=j5KzpDu7U3pvSKMZ68Cu-SuCfKnMC7LEMBJMMjyiKGQ,217
|
|
8
|
+
cdata_connect/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
9
|
+
cdata_connect/util/types.py,sha256=4_KHxd0zhGEyKFI2wgAcGBtnCans5P1GsePWmnaCwzw,6733
|
|
10
|
+
cdata_connect-0.0.1.dev1.dist-info/licenses/LICENSE,sha256=eoXgG4BOZBC6mSs-8kSD8QBpg3hKdQpuMt4UDCuHE2A,1077
|
|
11
|
+
cdata_connect-0.0.1.dev1.dist-info/METADATA,sha256=NkQsNnpKIp6cFkpPIdI0irQVVIdtpnpzoSzK6JQOQMU,7463
|
|
12
|
+
cdata_connect-0.0.1.dev1.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
|
|
13
|
+
cdata_connect-0.0.1.dev1.dist-info/top_level.txt,sha256=8rRcbnPrlWSwEnf-2zyGD6EL2RIgU7G8zYDfQAWRakE,14
|
|
14
|
+
cdata_connect-0.0.1.dev1.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 CData Software, Inc.
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
cdata_connect
|