singlestoredb-1.15.8-cp38-abi3-win32.whl → singlestoredb-1.16.0-cp38-abi3-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of singlestoredb might be problematic.
- _singlestoredb_accel.pyd +0 -0
- singlestoredb/__init__.py +1 -1
- singlestoredb/ai/__init__.py +0 -3
- singlestoredb/ai/chat.py +0 -38
- singlestoredb/ai/embeddings.py +0 -15
- singlestoredb/connection.py +2 -2
- singlestoredb/functions/ext/asgi.py +3 -3
- singlestoredb/functions/ext/rowdat_1.py +1 -1
- singlestoredb/functions/signature.py +1 -1
- singlestoredb/functions/typing/__init__.py +1 -1
- singlestoredb/functions/utils.py +1 -1
- singlestoredb/fusion/handler.py +1 -1
- singlestoredb/fusion/result.py +1 -1
- singlestoredb/http/connection.py +2 -2
- singlestoredb/management/manager.py +61 -0
- singlestoredb/management/utils.py +1 -1
- singlestoredb/management/workspace.py +6 -0
- singlestoredb/mysql/connection.py +1 -1
- singlestoredb/pytest.py +78 -9
- singlestoredb/tests/test_management.py +151 -124
- singlestoredb/utils/config.py +2 -2
- singlestoredb/utils/mogrify.py +1 -1
- singlestoredb/utils/results.py +2 -2
- singlestoredb/utils/xdict.py +4 -4
- {singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info}/METADATA +42 -22
- {singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info}/RECORD +30 -30
- {singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info}/WHEEL +1 -1
- {singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info}/top_level.txt +1 -0
- {singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info/licenses}/LICENSE +0 -0
_singlestoredb_accel.pyd
CHANGED
Binary file
singlestoredb/__init__.py
CHANGED
singlestoredb/ai/__init__.py
CHANGED
@@ -1,5 +1,2 @@
-from .chat import SingleStoreChat  # noqa: F401
 from .chat import SingleStoreChatFactory  # noqa: F401
-from .chat import SingleStoreChatOpenAI  # noqa: F401
-from .embeddings import SingleStoreEmbeddings  # noqa: F401
 from .embeddings import SingleStoreEmbeddingsFactory  # noqa: F401
singlestoredb/ai/chat.py
CHANGED
@@ -29,44 +29,6 @@ from botocore import UNSIGNED
 from botocore.config import Config
 
 
-class SingleStoreChatOpenAI(ChatOpenAI):
-    def __init__(self, model_name: str, api_key: Optional[str] = None, **kwargs: Any):
-        inference_api_manger = (
-            manage_workspaces().organizations.current.inference_apis
-        )
-        info = inference_api_manger.get(model_name=model_name)
-        token = (
-            api_key
-            if api_key is not None
-            else os.environ.get('SINGLESTOREDB_USER_TOKEN')
-        )
-        super().__init__(
-            base_url=info.connection_url,
-            api_key=token,
-            model=model_name,
-            **kwargs,
-        )
-
-
-class SingleStoreChat(ChatOpenAI):
-    def __init__(self, model_name: str, api_key: Optional[str] = None, **kwargs: Any):
-        inference_api_manger = (
-            manage_workspaces().organizations.current.inference_apis
-        )
-        info = inference_api_manger.get(model_name=model_name)
-        token = (
-            api_key
-            if api_key is not None
-            else os.environ.get('SINGLESTOREDB_USER_TOKEN')
-        )
-        super().__init__(
-            base_url=info.connection_url,
-            api_key=token,
-            model=model_name,
-            **kwargs,
-        )
-
-
 def SingleStoreChatFactory(
     model_name: str,
     api_key: Optional[str] = None,
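Only the factory functions survive this change: SingleStoreChat, SingleStoreChatOpenAI, and SingleStoreEmbeddings are removed from the ai subpackage, while SingleStoreChatFactory and SingleStoreEmbeddingsFactory keep the model_name/api_key parameters visible in the remaining context lines. A hypothetical call, for orientation only (the factory's return type and remaining parameters are not shown in this diff):

    import os
    from singlestoredb.ai import SingleStoreChatFactory

    # model_name and api_key are the parameters visible in the surviving signature;
    # the model name below is a placeholder, not something this diff documents.
    chat = SingleStoreChatFactory(
        model_name='example-model',
        api_key=os.environ.get('SINGLESTOREDB_USER_TOKEN'),
    )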
singlestoredb/ai/embeddings.py
CHANGED
@@ -29,21 +29,6 @@ from botocore import UNSIGNED
 from botocore.config import Config
 
 
-class SingleStoreEmbeddings(OpenAIEmbeddings):
-
-    def __init__(self, model_name: str, **kwargs: Any):
-        inference_api_manger = (
-            manage_workspaces().organizations.current.inference_apis
-        )
-        info = inference_api_manger.get(model_name=model_name)
-        super().__init__(
-            base_url=info.connection_url,
-            api_key=os.environ.get('SINGLESTOREDB_USER_TOKEN'),
-            model=model_name,
-            **kwargs,
-        )
-
-
 def SingleStoreEmbeddingsFactory(
     model_name: str,
     api_key: Optional[str] = None,
singlestoredb/connection.py
CHANGED
@@ -9,15 +9,15 @@ import re
 import sys
 import warnings
 import weakref
+from collections.abc import Iterator
 from collections.abc import Mapping
 from collections.abc import MutableMapping
+from collections.abc import Sequence
 from typing import Any
 from typing import Callable
 from typing import Dict
-from typing import Iterator
 from typing import List
 from typing import Optional
-from typing import Sequence
 from typing import Tuple
 from typing import Union
 from urllib.parse import parse_qs
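The connection.py hunk above, and the near-identical hunks that follow for the functions, fusion, http, management, and utils modules, all make the same mechanical change: container types that used to be imported from typing now come from collections.abc, which PEP 585 made subscriptable in Python 3.9, the minimum version this package declares. A minimal sketch of the before and after, not taken from the package itself:

    # before: deprecated aliases re-exported by typing
    # from typing import Iterator, Sequence

    # after: the abstract base classes themselves (subscriptable since Python 3.9)
    from collections.abc import Iterator, Sequence

    def first_two(values: Sequence[int]) -> Iterator[int]:
        # annotations behave the same with either import
        yield from values[:2]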
singlestoredb/functions/ext/asgi.py
CHANGED
@@ -48,15 +48,15 @@ import urllib
 import uuid
 import zipfile
 import zipimport
+from collections.abc import Awaitable
+from collections.abc import Iterable
+from collections.abc import Sequence
 from types import ModuleType
 from typing import Any
-from typing import Awaitable
 from typing import Callable
 from typing import Dict
-from typing import Iterable
 from typing import List
 from typing import Optional
-from typing import Sequence
 from typing import Set
 from typing import Tuple
 from typing import Union
singlestoredb/functions/ext/rowdat_1.py
CHANGED
@@ -1,11 +1,11 @@
 #!/usr/bin/env python3
 import struct
 import warnings
+from collections.abc import Sequence
 from io import BytesIO
 from typing import Any
 from typing import List
 from typing import Optional
-from typing import Sequence
 from typing import Tuple
 from typing import TYPE_CHECKING
 
singlestoredb/functions/signature.py
CHANGED
@@ -9,12 +9,12 @@ import string
 import sys
 import types
 import typing
+from collections.abc import Sequence
 from typing import Any
 from typing import Callable
 from typing import Dict
 from typing import List
 from typing import Optional
-from typing import Sequence
 from typing import Tuple
 from typing import TypeVar
 from typing import Union
singlestoredb/functions/utils.py
CHANGED
@@ -4,10 +4,10 @@ import struct
 import sys
 import types
 import typing
+from collections.abc import Iterable
 from enum import Enum
 from typing import Any
 from typing import Dict
-from typing import Iterable
 from typing import Tuple
 from typing import Union
 
singlestoredb/fusion/handler.py
CHANGED
@@ -5,10 +5,10 @@ import os
 import re
 import sys
 import textwrap
+from collections.abc import Iterable
 from typing import Any
 from typing import Callable
 from typing import Dict
-from typing import Iterable
 from typing import List
 from typing import Optional
 from typing import Set
singlestoredb/fusion/result.py
CHANGED
singlestoredb/http/connection.py
CHANGED
@@ -10,13 +10,13 @@ import os
 import re
 import time
 from base64 import b64decode
+from collections.abc import Iterable
+from collections.abc import Sequence
 from typing import Any
 from typing import Callable
 from typing import Dict
-from typing import Iterable
 from typing import List
 from typing import Optional
-from typing import Sequence
 from typing import Tuple
 from typing import Union
 from urllib.parse import urljoin
singlestoredb/management/manager.py
CHANGED
@@ -15,6 +15,7 @@ import requests
 
 from .. import config
 from ..exceptions import ManagementError
+from ..exceptions import OperationalError
 from .utils import get_token
 
 
@@ -310,3 +311,63 @@ class Manager(object):
             out = getattr(self, f'get_{self.obj_type}')(out.id)
 
         return out
+
+    def _wait_on_endpoint(
+        self,
+        out: Any,
+        interval: int = 10,
+        timeout: int = 300,
+    ) -> Any:
+        """
+        Wait for the endpoint to be ready by attempting to connect.
+
+        Parameters
+        ----------
+        out : Any
+            Workspace object with a connect method
+        interval : int, optional
+            Interval between each connection attempt (default: 10 seconds)
+        timeout : int, optional
+            Maximum time to wait before raising an exception (default: 300 seconds)
+
+        Raises
+        ------
+        ManagementError
+            If timeout is reached or endpoint is not available
+
+        Returns
+        -------
+        Same object type as `out`
+
+        """
+        # Only wait if workload type is set which means we are in the
+        # notebook environment. Outside of the environment, the endpoint
+        # may not be reachable directly.
+        if not os.environ.get('SINGLESTOREDB_WORKLOAD_TYPE', ''):
+            return out
+
+        if not hasattr(out, 'connect') or not out.connect:
+            raise ManagementError(
+                msg=f'{type(out).__name__} object does not have a valid endpoint',
+            )
+
+        while True:
+            try:
+                # Try to establish a connection to the endpoint using context manager
+                with out.connect(connect_timeout=5):
+                    pass
+            except Exception as exc:
+                # If we get an 'access denied' error, that means that the server is
+                # up and we just aren't authenticating.
+                if isinstance(exc, OperationalError) and exc.errno == 1045:
+                    break
+                # If connection fails, check timeout and retry
+                if timeout <= 0:
+                    raise ManagementError(
+                        msg=f'Exceeded waiting time for {self.obj_type} endpoint '
+                        'to become ready',
+                    )
+                time.sleep(interval)
+                timeout -= interval
+
+        return out
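The new _wait_on_endpoint helper polls the workspace endpoint and treats MySQL error 1045 (ER_ACCESS_DENIED_ERROR) as proof that the server is reachable, since a credential rejection can only come from a listening server. A stripped-down sketch of that polling idea, with a hypothetical connect_fn standing in for the workspace connect method:

    import time

    def wait_until_reachable(connect_fn, interval=10, timeout=300):
        # Poll until connect_fn() either succeeds or fails with MySQL error 1045;
        # both outcomes mean the endpoint is up and accepting connections.
        while True:
            try:
                connect_fn()
                return
            except Exception as exc:
                if getattr(exc, 'errno', None) == 1045:
                    return
                if timeout <= 0:
                    raise TimeoutError('endpoint did not become ready')
                time.sleep(interval)
                timeout -= interval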
singlestoredb/management/utils.py
CHANGED
@@ -6,11 +6,11 @@ import itertools
 import os
 import re
 import sys
+from collections.abc import Mapping
 from typing import Any
 from typing import Callable
 from typing import Dict
 from typing import List
-from typing import Mapping
 from typing import Optional
 from typing import SupportsIndex
 from typing import Tuple
singlestoredb/management/workspace.py
CHANGED
@@ -1794,6 +1794,12 @@ class WorkspaceManager(Manager):
                 interval=wait_interval,
                 timeout=wait_timeout,
             )
+            # After workspace is active, wait for endpoint to be ready
+            out = self._wait_on_endpoint(
+                out,
+                interval=wait_interval,
+                timeout=wait_timeout,
+            )
         return out
 
     def get_workspace_group(self, id: str) -> WorkspaceGroup:
singlestoredb/pytest.py
CHANGED
@@ -2,10 +2,12 @@
 """Pytest plugin"""
 import logging
 import os
+import socket
 import subprocess
 import time
+import uuid
+from collections.abc import Iterator
 from enum import Enum
-from typing import Iterator
 from typing import Optional
 
 import pytest
@@ -28,6 +30,14 @@ TEARDOWN_WAIT_ATTEMPTS = 20
 TEARDOWN_WAIT_SECONDS = 2
 
 
+def _find_free_port() -> int:
+    """Find a free port by binding to port 0 and getting the assigned port."""
+    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+        s.bind(('', 0))
+        s.listen(1)
+        return s.getsockname()[1]
+
+
 class ExecutionMode(Enum):
     SEQUENTIAL = 1
     LEADER = 2
@@ -79,7 +89,11 @@ class _TestContainerManager():
     """Manages the setup and teardown of a SingleStoreDB Dev Container"""
 
     def __init__(self) -> None:
-
+        # Generate unique container name using UUID and worker ID
+        worker = os.environ.get('PYTEST_XDIST_WORKER', 'master')
+        unique_id = uuid.uuid4().hex[:8]
+        self.container_name = f'singlestoredb-test-{worker}-{unique_id}'
+
         self.dev_image_name = 'ghcr.io/singlestore-labs/singlestoredb-dev'
 
         assert 'SINGLESTORE_LICENSE' in os.environ, 'SINGLESTORE_LICENSE not set'
@@ -91,14 +105,69 @@ class _TestContainerManager():
             'SINGLESTORE_SET_GLOBAL_DEFAULT_PARTITIONS_PER_LEAF': '1',
         }
 
-
+        # Use dynamic port allocation to avoid conflicts
+        self.mysql_port = _find_free_port()
+        self.http_port = _find_free_port()
+        self.studio_port = _find_free_port()
+        self.ports = [
+            (self.mysql_port, '3306'),  # External port -> Internal port
+            (self.http_port, '8080'),
+            (self.studio_port, '9000'),
+        ]
+
+        self.url = f'root:{self.root_password}@127.0.0.1:{self.mysql_port}'
+
+    def _container_exists(self) -> bool:
+        """Check if a container with this name already exists."""
+        try:
+            result = subprocess.run(
+                [
+                    'docker', 'ps', '-a', '--filter',
+                    f'name={self.container_name}',
+                    '--format', '{{.Names}}',
+                ],
+                capture_output=True,
+                text=True,
+                check=True,
+            )
+            return self.container_name in result.stdout
+        except subprocess.CalledProcessError:
+            return False
+
+    def _cleanup_existing_container(self) -> None:
+        """Stop and remove any existing container with the same name."""
+        if not self._container_exists():
+            return
 
-
+        logger.info(f'Found existing container {self.container_name}, cleaning up')
+        try:
+            # Try to stop the container (ignore if it's already stopped)
+            subprocess.run(
+                ['docker', 'stop', self.container_name],
+                capture_output=True,
+                check=False,
+            )
+            # Remove the container
+            subprocess.run(
+                ['docker', 'rm', self.container_name],
+                capture_output=True,
+                check=True,
+            )
+            logger.debug(f'Cleaned up existing container {self.container_name}')
+        except subprocess.CalledProcessError as e:
+            logger.warning(f'Failed to cleanup existing container: {e}')
+            # Continue anyway - the unique name should prevent most conflicts
 
     def start(self) -> None:
+        # Clean up any existing container with the same name
+        self._cleanup_existing_container()
+
         command = ' '.join(self._start_command())
 
-        logger.info(
+        logger.info(
+            f'Starting container {self.container_name} on ports {self.mysql_port}, '
+            f'{self.http_port}, {self.studio_port}',
+        )
         try:
             license = os.environ['SINGLESTORE_LICENSE']
             env = {
@@ -108,8 +177,8 @@ class _TestContainerManager():
         except Exception as e:
             logger.exception(e)
             raise RuntimeError(
-                'Failed to start container. '
-                '
+                f'Failed to start container {self.container_name}. '
+                f'Command: {command}',
             ) from e
         logger.debug('Container started')
 
@@ -123,9 +192,9 @@ class _TestContainerManager():
         else:
             yield f'{key}={value}'
 
-        for
+        for external_port, internal_port in self.ports:
            yield '-p'
-            yield f'{
+            yield f'{external_port}:{internal_port}'
 
         yield self.dev_image_name
 
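The _find_free_port helper added above uses the standard trick of binding to port 0 so the operating system hands back an unused ephemeral port; the port is released as soon as the socket closes, which leaves a small window before Docker rebinds it. The same trick in isolation (illustrative, not part of the package):

    import socket

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(('', 0))            # port 0 asks the OS for any free port
        print(s.getsockname()[1])  # e.g. 54321; free again once the socket closes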
singlestoredb/tests/test_management.py
CHANGED
@@ -30,6 +30,7 @@ def shared_database_name(s):
     return re.sub(r'[^\w]', '', s).replace('-', '_').lower()
 
 
+@pytest.mark.skip(reason='Legacy cluster Management API is going away')
 @pytest.mark.management
 class TestCluster(unittest.TestCase):
 
@@ -675,184 +676,201 @@ class TestStage(unittest.TestCase):
     def test_os_directories(self):
         st = self.wg.stage
 
+        mkdir_test_1 = f'mkdir_test_1_{id(self)}'
+        mkdir_test_2 = f'mkdir_test_2_{id(self)}'
+        mkdir_test_3 = f'mkdir_test_3_{id(self)}'
+
         # mkdir
-        st.mkdir(
-        st.mkdir(
+        st.mkdir(mkdir_test_1)
+        st.mkdir(mkdir_test_2)
         with self.assertRaises(s2.ManagementError):
-            st.mkdir('mkdir_test_2/nest_1/nest_2')
-        st.mkdir('mkdir_test_2/nest_1')
-        st.mkdir('mkdir_test_2/nest_1/nest_2')
-        st.mkdir('mkdir_test_3')
-
-        assert st.exists('mkdir_test_1/')
-        assert st.exists('mkdir_test_2/')
-        assert st.exists('mkdir_test_2/nest_1/')
-        assert st.exists('mkdir_test_2/nest_1/nest_2/')
+            st.mkdir(f'{mkdir_test_2}/nest_1/nest_2')
+        st.mkdir(f'{mkdir_test_2}/nest_1')
+        st.mkdir(f'{mkdir_test_2}/nest_1/nest_2')
+        st.mkdir(f'{mkdir_test_3}')
+
+        assert st.exists(f'{mkdir_test_1}/')
+        assert st.exists(f'{mkdir_test_2}/')
+        assert st.exists(f'{mkdir_test_2}/nest_1/')
+        assert st.exists(f'{mkdir_test_2}/nest_1/nest_2/')
        assert not st.exists('foo/')
        assert not st.exists('foo/bar/')
 
-        assert st.is_dir('mkdir_test_1/')
-        assert st.is_dir('mkdir_test_2/')
-        assert st.is_dir('mkdir_test_2/nest_1/')
-        assert st.is_dir('mkdir_test_2/nest_1/nest_2/')
+        assert st.is_dir(f'{mkdir_test_1}/')
+        assert st.is_dir(f'{mkdir_test_2}/')
+        assert st.is_dir(f'{mkdir_test_2}/nest_1/')
+        assert st.is_dir(f'{mkdir_test_2}/nest_1/nest_2/')
 
-        assert not st.is_file('mkdir_test_1/')
-        assert not st.is_file('mkdir_test_2/')
-        assert not st.is_file('mkdir_test_2/nest_1/')
-        assert not st.is_file('mkdir_test_2/nest_1/nest_2/')
+        assert not st.is_file(f'{mkdir_test_1}/')
+        assert not st.is_file(f'{mkdir_test_2}/')
+        assert not st.is_file(f'{mkdir_test_2}/nest_1/')
+        assert not st.is_file(f'{mkdir_test_2}/nest_1/nest_2/')
 
         out = st.listdir('/')
-        assert 'mkdir_test_1/' in out
-        assert 'mkdir_test_2/' in out
-        assert 'mkdir_test_2/nest_1/nest_2/' not in out
+        assert f'{mkdir_test_1}/' in out
+        assert f'{mkdir_test_2}/' in out
+        assert f'{mkdir_test_2}/nest_1/nest_2/' not in out
 
         out = st.listdir('/', recursive=True)
-        assert 'mkdir_test_1/' in out
-        assert 'mkdir_test_2/' in out
-        assert 'mkdir_test_2/nest_1/nest_2/' in out
+        assert f'{mkdir_test_1}/' in out
+        assert f'{mkdir_test_2}/' in out
+        assert f'{mkdir_test_2}/nest_1/nest_2/' in out
 
-        out = st.listdir(
-        assert 'mkdir_test_1/' not in out
+        out = st.listdir(mkdir_test_2)
+        assert f'{mkdir_test_1}/' not in out
         assert 'nest_1/' in out
         assert 'nest_2/' not in out
         assert 'nest_1/nest_2/' not in out
 
-        out = st.listdir(
-        assert 'mkdir_test_1/' not in out
+        out = st.listdir(mkdir_test_2, recursive=True)
+        assert f'{mkdir_test_1}/' not in out
         assert 'nest_1/' in out
         assert 'nest_2/' not in out
         assert 'nest_1/nest_2/' in out
 
         # rmdir
         before = st.listdir('/', recursive=True)
-        st.rmdir('mkdir_test_1/')
+        st.rmdir(f'{mkdir_test_1}/')
         after = st.listdir('/', recursive=True)
-        assert 'mkdir_test_1/' in before
-        assert 'mkdir_test_1/' not in after
-        assert list(sorted(before)) == list(sorted(after + ['mkdir_test_1/']))
+        assert f'{mkdir_test_1}/' in before
+        assert f'{mkdir_test_1}/' not in after
+        assert list(sorted(before)) == list(sorted(after + [f'{mkdir_test_1}/']))
 
         with self.assertRaises(OSError):
-            st.rmdir('mkdir_test_2/')
+            st.rmdir(f'{mkdir_test_2}/')
+
+        mkdir_test_sql = f'mkdir_test_{id(self)}.sql'
 
-        st.upload_file(TEST_DIR / 'test.sql',
+        st.upload_file(TEST_DIR / 'test.sql', mkdir_test_sql)
 
         with self.assertRaises(NotADirectoryError):
-            st.rmdir(
+            st.rmdir(mkdir_test_sql)
 
         # removedirs
         before = st.listdir('/')
-        st.removedirs('mkdir_test_2/')
+        st.removedirs(f'{mkdir_test_2}/')
         after = st.listdir('/')
-        assert 'mkdir_test_2/' in before
-        assert 'mkdir_test_2/' not in after
-        assert list(sorted(before)) == list(sorted(after + ['mkdir_test_2/']))
+        assert f'{mkdir_test_2}/' in before
+        assert f'{mkdir_test_2}/' not in after
+        assert list(sorted(before)) == list(sorted(after + [f'{mkdir_test_2}/']))
 
         with self.assertRaises(s2.ManagementError):
-            st.removedirs(
+            st.removedirs(mkdir_test_sql)
 
     def test_os_files(self):
         st = self.wg.stage
 
-
-
+        files_test_sql = f'files_test_{id(self)}.sql'
+        files_test_1_dir = f'files_test_1_{id(self)}'
+
+        st.mkdir(files_test_1_dir)
+        st.mkdir(f'{files_test_1_dir}/nest_1')
 
-        st.upload_file(TEST_DIR / 'test.sql',
-        st.upload_file(
+        st.upload_file(TEST_DIR / 'test.sql', files_test_sql)
+        st.upload_file(
+            TEST_DIR / 'test.sql',
+            f'{files_test_1_dir}/nest_1/nested_files_test.sql',
+        )
         st.upload_file(
             TEST_DIR / 'test.sql',
-            '
+            f'{files_test_1_dir}/nest_1/nested_files_test_2.sql',
         )
 
         # remove
         with self.assertRaises(IsADirectoryError):
-            st.remove('
+            st.remove(f'{files_test_1_dir}/')
 
         before = st.listdir('/')
-        st.remove(
+        st.remove(files_test_sql)
         after = st.listdir('/')
-        assert
-        assert
-        assert list(sorted(before)) == list(sorted(after + [
+        assert files_test_sql in before
+        assert files_test_sql not in after
+        assert list(sorted(before)) == list(sorted(after + [files_test_sql]))
 
-        before = st.listdir('
-        st.remove('
-        after = st.listdir('
+        before = st.listdir(f'{files_test_1_dir}/nest_1/')
+        st.remove(f'{files_test_1_dir}/nest_1/nested_files_test.sql')
+        after = st.listdir(f'{files_test_1_dir}/nest_1/')
         assert 'nested_files_test.sql' in before
         assert 'nested_files_test.sql' not in after
-        assert st.is_dir('
+        assert st.is_dir(f'{files_test_1_dir}/nest_1/')
 
         # Removing the last file does not remove empty directories
-        st.remove('
-        assert not st.is_file('
-        assert st.is_dir('
-        assert st.is_dir('
+        st.remove(f'{files_test_1_dir}/nest_1/nested_files_test_2.sql')
+        assert not st.is_file(f'{files_test_1_dir}/nest_1/nested_files_test_2.sql')
+        assert st.is_dir(f'{files_test_1_dir}/nest_1/')
+        assert st.is_dir(f'{files_test_1_dir}/')
 
-        st.removedirs(
-        assert not st.is_dir('
-        assert not st.is_dir('
+        st.removedirs(files_test_1_dir)
+        assert not st.is_dir(f'{files_test_1_dir}/nest_1/')
+        assert not st.is_dir(f'{files_test_1_dir}/')
 
     def test_os_rename(self):
         st = self.wg.stage
 
-
+        rename_test_sql = f'rename_test_{id(self)}.sql'
+        rename_test_2_sql = f'rename_test_2_{id(self)}.sql'
+        rename_test_1_dir = f'rename_test_1_{id(self)}'
+        rename_test_2_dir = f'rename_test_2_{id(self)}'
+
+        st.upload_file(TEST_DIR / 'test.sql', rename_test_sql)
 
         with self.assertRaises(s2.ManagementError):
             st.upload_file(
                 TEST_DIR / 'test.sql',
-                '
+                f'{rename_test_1_dir}/nest_1/nested_rename_test.sql',
             )
 
-        st.mkdir(
-        st.mkdir('
+        st.mkdir(rename_test_1_dir)
+        st.mkdir(f'{rename_test_1_dir}/nest_1')
 
-        assert st.exists('/
+        assert st.exists(f'/{rename_test_1_dir}/nest_1/')
 
         st.upload_file(
             TEST_DIR / 'test.sql',
-            '
+            f'{rename_test_1_dir}/nest_1/nested_rename_test.sql',
         )
 
         st.upload_file(
             TEST_DIR / 'test.sql',
-            '
+            f'{rename_test_1_dir}/nest_1/nested_rename_test_2.sql',
         )
 
         # rename file
-        assert
-        assert
-        st.rename(
-        assert
-        assert
+        assert rename_test_sql in st.listdir('/')
+        assert rename_test_2_sql not in st.listdir('/')
+        st.rename(rename_test_sql, rename_test_2_sql)
+        assert rename_test_sql not in st.listdir('/')
+        assert rename_test_2_sql in st.listdir('/')
 
         # rename directory
-        assert '
-        assert '
-        st.rename('
-        assert '
-        assert '
-        assert st.is_file('
-        assert st.is_file('
+        assert f'{rename_test_1_dir}/' in st.listdir('/')
+        assert f'{rename_test_2_dir}/' not in st.listdir('/')
+        st.rename(f'{rename_test_1_dir}/', f'{rename_test_2_dir}/')
+        assert f'{rename_test_1_dir}/' not in st.listdir('/')
+        assert f'{rename_test_2_dir}/' in st.listdir('/')
+        assert st.is_file(f'{rename_test_2_dir}/nest_1/nested_rename_test.sql')
+        assert st.is_file(f'{rename_test_2_dir}/nest_1/nested_rename_test_2.sql')
 
         # rename nested
-        assert '
+        assert f'{rename_test_2_dir}/nest_1/nested_rename_test.sql' in st.listdir(
             '/', recursive=True,
         )
-        assert '
+        assert f'{rename_test_2_dir}/nest_1/nested_rename_test_3.sql' not in st.listdir(
             '/', recursive=True,
         )
         st.rename(
-            '
-            '
+            f'{rename_test_2_dir}/nest_1/nested_rename_test.sql',
+            f'{rename_test_2_dir}/nest_1/nested_rename_test_3.sql',
         )
-        assert '
+        assert f'{rename_test_2_dir}/nest_1/nested_rename_test.sql' not in st.listdir(
             '/', recursive=True,
         )
-        assert '
+        assert f'{rename_test_2_dir}/nest_1/nested_rename_test_3.sql' in st.listdir(
             '/', recursive=True,
         )
-        assert not st.is_file('
-        assert st.is_file('
-        assert st.is_file('
+        assert not st.is_file(f'{rename_test_2_dir}/nest_1/nested_rename_test.sql')
+        assert st.is_file(f'{rename_test_2_dir}/nest_1/nested_rename_test_2.sql')
+        assert st.is_file(f'{rename_test_2_dir}/nest_1/nested_rename_test_3.sql')
 
         # non-existent file
         with self.assertRaises(OSError):
@@ -861,13 +879,13 @@ class TestStage(unittest.TestCase):
         # overwrite
         with self.assertRaises(OSError):
             st.rename(
-
-                '
+                rename_test_2_sql,
+                f'{rename_test_2_dir}/nest_1/nested_rename_test_3.sql',
             )
 
         st.rename(
-
-            '
+            rename_test_2_sql,
+            f'{rename_test_2_dir}/nest_1/nested_rename_test_3.sql', overwrite=True,
        )
 
     def test_file_object(self):
@@ -1399,35 +1417,41 @@ class TestFileSpaces(unittest.TestCase):
             space.remove(obj_open_test_ipynb)
 
     def test_os_directories(self):
+        mkdir_test_1_dir = f'mkdir_test_1_{id(self)}'
+
         for space in [self.personal_space, self.shared_space]:
             # Make sure directories error out
             with self.assertRaises(s2.ManagementError):
-                space.mkdir(
+                space.mkdir(mkdir_test_1_dir)
 
             with self.assertRaises(s2.ManagementError):
-                space.exists('
+                space.exists(f'{mkdir_test_1_dir}/')
 
             out = space.listdir('/')
-            assert '
+            assert f'{mkdir_test_1_dir}/' not in out
 
            with self.assertRaises(s2.ManagementError):
-                space.rmdir('
+                space.rmdir(f'{mkdir_test_1_dir}/')
 
     def test_os_rename(self):
+        rename_test_ipynb = f'rename_test_{id(self)}.ipynb'
+        rename_test_2_ipynb = f'rename_test_2_{id(self)}.ipynb'
+        rename_test_3_ipynb = f'rename_test_3_{id(self)}.ipynb'
+
         for space in [self.personal_space, self.shared_space]:
             space.upload_file(
                 TEST_DIR / 'test.ipynb',
-
+                rename_test_ipynb,
             )
-            assert
-            assert
+            assert rename_test_ipynb in space.listdir('/')
+            assert rename_test_2_ipynb not in space.listdir('/')
 
             space.rename(
-
-
+                rename_test_ipynb,
+                rename_test_2_ipynb,
             )
-            assert
-            assert
+            assert rename_test_ipynb not in space.listdir('/')
+            assert rename_test_2_ipynb in space.listdir('/')
 
             # non-existent file
             with self.assertRaises(OSError):
@@ -1435,37 +1459,40 @@ class TestFileSpaces(unittest.TestCase):
 
             space.upload_file(
                 TEST_DIR / 'test.ipynb',
-
+                rename_test_3_ipynb,
             )
 
             # overwrite
             with self.assertRaises(OSError):
                space.rename(
-
-
+                    rename_test_2_ipynb,
+                    rename_test_3_ipynb,
                )
 
             space.rename(
-
-
+                rename_test_2_ipynb,
+                rename_test_3_ipynb, overwrite=True,
            )
 
             # Cleanup
-            space.remove(
+            space.remove(rename_test_3_ipynb)
 
     def test_file_object(self):
+        obj_test_ipynb = f'obj_test_{id(self)}.ipynb'
+        obj_test_2_ipynb = f'obj_test_2_{id(self)}.ipynb'
+
         for space in [self.personal_space, self.shared_space]:
             f = space.upload_file(
                 TEST_DIR / 'test.ipynb',
-
+                obj_test_ipynb,
             )
 
             assert not f.is_dir()
             assert f.is_file()
 
             # abspath / basename / dirname / exists
-            assert f.abspath() ==
-            assert f.basename() ==
+            assert f.abspath() == obj_test_ipynb
+            assert f.basename() == obj_test_ipynb
             assert f.dirname() == '/'
             assert f.exists()
 
@@ -1474,9 +1501,9 @@ class TestFileSpaces(unittest.TestCase):
                open(TEST_DIR / 'test.ipynb', 'r').read()
             assert f.download() == open(TEST_DIR / 'test.ipynb', 'rb').read()
 
-            assert space.is_file(
+            assert space.is_file(obj_test_ipynb)
             f.remove()
-            assert not space.is_file(
+            assert not space.is_file(obj_test_ipynb)
 
             # mtime / ctime
             assert f.getmtime() > 0
@@ -1485,17 +1512,17 @@ class TestFileSpaces(unittest.TestCase):
             # rename
             f = space.upload_file(
                 TEST_DIR / 'test.ipynb',
-
+                obj_test_ipynb,
             )
-            assert space.exists(
-            assert not space.exists(
-            f.rename(
-            assert not space.exists(
-            assert space.exists(
-            assert f.abspath() ==
+            assert space.exists(obj_test_ipynb)
+            assert not space.exists(obj_test_2_ipynb)
+            f.rename(obj_test_2_ipynb)
+            assert not space.exists(obj_test_ipynb)
+            assert space.exists(obj_test_2_ipynb)
+            assert f.abspath() == obj_test_2_ipynb
 
             # Cleanup
-            space.remove(
+            space.remove(obj_test_2_ipynb)
 
 
 @pytest.mark.management
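Most of the test churn above swaps hard-coded Stage and file-space paths for names suffixed with id(self), apparently so that concurrently running test instances do not trip over each other's directories. The naming pattern in isolation (illustrative only, not taken from the package):

    class Example:
        def unique_stage_name(self) -> str:
            # id(self) is unique among live objects in one process, which is
            # enough to keep side-by-side test instances in separate paths
            return f'mkdir_test_1_{id(self)}'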
singlestoredb/utils/config.py
CHANGED
@@ -27,12 +27,12 @@ a description of one or more options.
 import contextlib
 import os
 import re
+from collections.abc import Iterator
+from collections.abc import Mapping
 from typing import Any
 from typing import Callable
 from typing import Dict
-from typing import Iterator
 from typing import List
-from typing import Mapping
 from typing import Optional
 from typing import Tuple
 from typing import Union
singlestoredb/utils/mogrify.py
CHANGED
singlestoredb/utils/results.py
CHANGED
@@ -300,9 +300,9 @@ def results_to_polars(
     if has_polars:
         schema = _description_to_polars_schema(desc) if schema is None else schema
         if single:
-            out = pl.DataFrame([res], **schema.get('schema', {}))
+            out = pl.DataFrame([res], orient='row', **schema.get('schema', {}))
         else:
-            out = pl.DataFrame(res, **schema.get('schema', {}))
+            out = pl.DataFrame(res, orient='row', **schema.get('schema', {}))
         with_columns = schema.get('with_columns')
         if with_columns:
             return out.with_columns(**with_columns)
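Passing orient='row' states explicitly that each element of res is one row, which is what a DB-API cursor hands back; without it, polars has to guess the orientation of nested sequence data and recent releases can warn about the ambiguity. A small standalone illustration, independent of the package:

    import polars as pl

    rows = [(1, 'alpha'), (2, 'beta')]    # cursor-style rows
    df = pl.DataFrame(rows, schema=['id', 'name'], orient='row')
    print(df.shape)                       # (2, 2): two rows, two columns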
singlestoredb/utils/xdict.py
CHANGED
@@ -19,14 +19,14 @@
 """Dictionary that allows setting nested keys by period (.) delimited strings."""
 import copy
 import re
+from collections.abc import ItemsView
+from collections.abc import Iterable
+from collections.abc import KeysView
+from collections.abc import ValuesView
 from typing import Any
 from typing import Dict
-from typing import ItemsView
-from typing import Iterable
-from typing import KeysView
 from typing import List
 from typing import Tuple
-from typing import ValuesView
 
 
 def _is_compound_key(key: str) -> bool:
{singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info}/METADATA
CHANGED
@@ -1,13 +1,11 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: singlestoredb
-Version: 1.
+Version: 1.16.0
 Summary: Interface to the SingleStoreDB database and workspace management APIs
-
-Author: SingleStore
-Author-email: support@singlestore.com
+Author-email: SingleStore <support@singlestore.com>
 License: Apache-2.0
+Project-URL: Homepage, https://github.com/singlestore-labs/singlestoredb-python
 Classifier: Development Status :: 5 - Production/Stable
-Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Topic :: Database
@@ -15,36 +13,58 @@ Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: PyJWT
-Requires-Dist: build
 Requires-Dist: parsimonious
 Requires-Dist: requests
-Requires-Dist: setuptools
 Requires-Dist: sqlparams
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: typing-extensions <=4.13.2 ; python_version < "3.11"
+Requires-Dist: tomli>=1.1.0; python_version < "3.11"
+Requires-Dist: typing-extensions<=4.13.2; python_version < "3.11"
 Provides-Extra: dataframe
-Requires-Dist: ibis-singlestoredb
+Requires-Dist: ibis-singlestoredb; extra == "dataframe"
 Provides-Extra: dbt
-Requires-Dist: dbt-singlestore
+Requires-Dist: dbt-singlestore; extra == "dbt"
 Provides-Extra: docker
-Requires-Dist: docker
+Requires-Dist: docker; extra == "docker"
 Provides-Extra: ed22519
-Requires-Dist: PyNaCl
+Requires-Dist: PyNaCl>=1.4.0; extra == "ed22519"
 Provides-Extra: gssapi
-Requires-Dist: gssapi
+Requires-Dist: gssapi; extra == "gssapi"
 Provides-Extra: ibis
-Requires-Dist: ibis-singlestoredb
+Requires-Dist: ibis-singlestoredb; extra == "ibis"
 Provides-Extra: kerberos
-Requires-Dist: gssapi
+Requires-Dist: gssapi; extra == "kerberos"
 Provides-Extra: pytest
-Requires-Dist: pytest
+Requires-Dist: pytest; extra == "pytest"
 Provides-Extra: rsa
-Requires-Dist: cryptography
+Requires-Dist: cryptography; extra == "rsa"
 Provides-Extra: sqlalchemy
-Requires-Dist: sqlalchemy-singlestoredb
+Requires-Dist: sqlalchemy-singlestoredb>=1.0.0; extra == "sqlalchemy"
 Provides-Extra: vectorstore
-Requires-Dist: singlestore-vectorstore
+Requires-Dist: singlestore-vectorstore>=0.1.2; extra == "vectorstore"
+Provides-Extra: test
+Requires-Dist: coverage; extra == "test"
+Requires-Dist: dash; extra == "test"
+Requires-Dist: fastapi; extra == "test"
+Requires-Dist: ipython; extra == "test"
+Requires-Dist: jupysql; extra == "test"
+Requires-Dist: pandas; extra == "test"
+Requires-Dist: parameterized; extra == "test"
+Requires-Dist: polars; extra == "test"
+Requires-Dist: pyarrow; extra == "test"
+Requires-Dist: pydantic; extra == "test"
+Requires-Dist: pytest; extra == "test"
+Requires-Dist: pytest-cov; extra == "test"
+Requires-Dist: singlestore-vectorstore>=0.1.2; extra == "test"
+Requires-Dist: uvicorn; extra == "test"
+Provides-Extra: docs
+Requires-Dist: sphinx; extra == "docs"
+Requires-Dist: sphinx_rtd_theme; extra == "docs"
+Provides-Extra: build
+Requires-Dist: build; extra == "build"
+Requires-Dist: setuptools>=61.0; extra == "build"
+Requires-Dist: wheel; extra == "build"
+Provides-Extra: dev
+Requires-Dist: singlestoredb[build,docs,test]; extra == "dev"
+Dynamic: license-file
 
 # <img src="https://github.com/singlestore-labs/singlestoredb-python/blob/main/resources/singlestore-logo.png" height="60" valign="middle"/> SingleStoreDB Python SDK
 
{singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info}/RECORD
CHANGED
@@ -1,17 +1,17 @@
-_singlestoredb_accel.pyd,sha256=
-singlestoredb/__init__.py,sha256
+_singlestoredb_accel.pyd,sha256=8zWZ-kYeDG_hk2sSgDrNYf96ElaqgjXBOsemSUub2D8,66048
+singlestoredb/__init__.py,sha256=FacdBWvs_xJzhiKnDXmKnH5waTZef3yqAQGDo44wIB4,2347
 singlestoredb/auth.py,sha256=RmYiH0Wlc2RXc4pTlRMysxtBI445ggCIwojWKC_eDLE,7844
 singlestoredb/config.py,sha256=rS8OmWMaHfMJQTkmSw_qwXR2R0HP80eP4gjzVmXkL2E,14419
-singlestoredb/connection.py,sha256=
+singlestoredb/connection.py,sha256=EX4cUoWK51tb3OYv2eD183yzhRTGsODkip_A89PB_34,47560
 singlestoredb/converters.py,sha256=ax1wpwv04CpDA039UDjDSTw0ojjIY7T9KMz2oYQxKjc,21654
 singlestoredb/exceptions.py,sha256=WCCJrNSsU-hD-621Jpd6bwmvGftQ7byXkk-XKXlaxpg,3354
 singlestoredb/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-singlestoredb/pytest.py,sha256=
+singlestoredb/pytest.py,sha256=kwIyzJIOzBIoJ2-DosUyyfe5F1K6AiuylkCB3hrcfME,12092
 singlestoredb/types.py,sha256=g6iJnOSCuRUkuUJOYSdRPt3QTjC9h2Dq4fqFFktXxXg,10770
 singlestoredb/vectorstore.py,sha256=4YvXml3PpOEOtUGO7gylucKG2Rny8Bx6L29kmhsFiCY,8600
-singlestoredb/ai/__init__.py,sha256=
-singlestoredb/ai/chat.py,sha256=
-singlestoredb/ai/embeddings.py,sha256=
+singlestoredb/ai/__init__.py,sha256=fUacHgFFZDNW9YCtsPPO5yyk9CSdSHZihOyHzZpp5K8,124
+singlestoredb/ai/chat.py,sha256=EEvrmu6hFossMCMg0SKDswnrtHokaRM04_sXb9_Qap8,4820
+singlestoredb/ai/embeddings.py,sha256=zeHxCwTG-YCcLoYnZV0pOB45nsEtCnNZ5cnZABlodHg,4484
 singlestoredb/alchemy/__init__.py,sha256=bUmCl1xUn2v36RMbXLIrvgKzZSqx71mp1ReUw9JeVA8,2613
 singlestoredb/apps/__init__.py,sha256=7l4d4hCtm1ykDNf7UBi3Qnqg9N0qPs5jbQ0Al5tS5aM,173
 singlestoredb/apps/_cloud_functions.py,sha256=DMRC-4z3Q52hsKb_WlolfNcYV-5XmQGiJWbbaUxFZ0s,2794
@@ -44,26 +44,26 @@ singlestoredb/docstring/tests/test_util.py,sha256=Hc0dWbgvXOKhB6Mjw2Q78lUoy2oph5
 singlestoredb/functions/__init__.py,sha256=KPBjRaiVipCQwTSsryHvBE_qrwy7Kj74lKnutqplcao,487
 singlestoredb/functions/decorator.py,sha256=4OWVI6S_M1gXEtWDuHM5FBhJXbiSmIsAYL79DAwpunE,6597
 singlestoredb/functions/dtypes.py,sha256=7w_atIL5jAvDNtu6RDCvY440Y9U-p19_Nf7R5ki46Co,41607
-singlestoredb/functions/signature.py,sha256=
-singlestoredb/functions/utils.py,sha256=
+singlestoredb/functions/signature.py,sha256=sIsYuVD9U9Nmm1dAh8im8T7_4U-UxPBLJAVB0WirR9w,47132
+singlestoredb/functions/utils.py,sha256=jl51TsbzRylgP8Tp_9Fa5RxlM8a_BIy-ut-zrGkx8FQ,11199
 singlestoredb/functions/ext/__init__.py,sha256=5ppI8IZN_zOwoJFdu_Oq9ipxtyHw9n6OMVAa_s9T_yY,24
 singlestoredb/functions/ext/arrow.py,sha256=mQhwaMpvCH_dP92WIhP_j-stu272n4UAHsFUOBTgnq0,9436
-singlestoredb/functions/ext/asgi.py,sha256=
+singlestoredb/functions/ext/asgi.py,sha256=cF6468Y13Fj18gYubzRPubSn_iNGbzyAUE75vvjz5O0,74428
 singlestoredb/functions/ext/json.py,sha256=j9133xOpyuSqb8smBmi_bPvv6OYCbNfpbLbEicyGqmQ,10522
 singlestoredb/functions/ext/mmap.py,sha256=0BN9OyEONZ174qdZWe2m3Xykt3-QcxyLYBt2iCG772Q,14123
-singlestoredb/functions/ext/rowdat_1.py,sha256=
+singlestoredb/functions/ext/rowdat_1.py,sha256=4jnDQnk3A8vR_ZXiZhP7K9vQD5ImxaIVAdlhHKLldSw,21845
 singlestoredb/functions/ext/timer.py,sha256=fVo0YIwV8T6Fbl6kBbgnwMTQeLVXCVDmuzkXLC5MpVg,2795
 singlestoredb/functions/ext/utils.py,sha256=KE0g1s4jUEoJK44CChuCgPB6Ko3KpRbVSYI_aHbzams,7156
-singlestoredb/functions/typing/__init__.py,sha256=
+singlestoredb/functions/typing/__init__.py,sha256=3-WRGcUjLsiYXiZmA2MVQeg494r716XtasaODew78Rk,1389
 singlestoredb/functions/typing/numpy.py,sha256=WJt0bWwyEA8Mofpn_-0Q82u7Q8XAtzBuhbaXSqE1E34,681
 singlestoredb/functions/typing/pandas.py,sha256=-abvGDup-WwTbaAyQuNo4Fq7ATe8gYx_5b2yUPJlX7o,85
 singlestoredb/functions/typing/polars.py,sha256=HWAjc6o6NAAXZjNIUyLe5R4ZgrIz2NHjk43wTSpy4bY,85
 singlestoredb/functions/typing/pyarrow.py,sha256=gQcvvrm5BYeuYl8UKr8-07DbA_AtbpkiSEtQkRMW7AA,82
 singlestoredb/fusion/__init__.py,sha256=FHWtrg6OJFTf6Ye197V5sU6ssryr2h6FBcDIgXP7-H4,367
 singlestoredb/fusion/graphql.py,sha256=SHqsPe4xgawdsTPHEtJGQlybYGWqPrGMmyK-v20RLac,5420
-singlestoredb/fusion/handler.py,sha256=
+singlestoredb/fusion/handler.py,sha256=GjxmJ19B_tNl-ymKj1a2CW4KLMUiZvwv9o16-9jHCNM,28607
 singlestoredb/fusion/registry.py,sha256=_eT1gd38VPlFKs5f9Pu6lqQyoDQ_ixW5O56QwYLQ89Y,6361
-singlestoredb/fusion/result.py,sha256=
+singlestoredb/fusion/result.py,sha256=p88VTNwAtiOccM-UTtLMDj7v_IRlLkHufFQuWUk_-3w,12198
 singlestoredb/fusion/handlers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 singlestoredb/fusion/handlers/export.py,sha256=MqPINMHGl-7SkKzdHcgg343uoFQDXaKSHdoFmefA-KM,15834
 singlestoredb/fusion/handlers/files.py,sha256=kz4eP06Xux1PW3L-Hj8-PmaCKc-1J8QeKwgqUdudqYA,19661
@@ -73,7 +73,7 @@ singlestoredb/fusion/handlers/stage.py,sha256=oNl11GYUUQHmIrWsqaA1X8lokvFxFgN0Ce
 singlestoredb/fusion/handlers/utils.py,sha256=nV2lSzKhv7CzM7I_uIh5kmDV0Ec6VeeKoHczx5pVNcw,11009
 singlestoredb/fusion/handlers/workspace.py,sha256=2m8tBM6XY4nUk64uY7puqecFc7fBNQo2G9nDaO2nlS0,30623
 singlestoredb/http/__init__.py,sha256=4cEDvLloGc3LSpU-PnIwacyu0n5oIIIE6xk2SPyWD_w,939
-singlestoredb/http/connection.py,sha256=
+singlestoredb/http/connection.py,sha256=u4WTtNB3R9Ea9HN-yx77MDeI4pqNaO_FXCKnkWxYkKI,41012
 singlestoredb/magics/__init__.py,sha256=fqCBQ0s8o1CYE4Xo_XiSbkLDzLgMNDgpSkOx66-uDZw,1244
 singlestoredb/magics/run_personal.py,sha256=D71VVRk-qQAZf6fHUbxqTadxcopSklJu7ccoQ82uhV8,5359
 singlestoredb/magics/run_shared.py,sha256=9Lo3hmESgp0gGLaL1pgLtTA6qsbIZluM7mufcoCAVcI,5264
@@ -84,15 +84,15 @@ singlestoredb/management/export.py,sha256=9kNb9C9HHyUrIfIWVvpghal47SQutoa2PTPmys
 singlestoredb/management/files.py,sha256=Z9GpS2EHf9atE8kJdz1vJtsiT80O6TV00MPhqyXfAAw,31579
 singlestoredb/management/inference_api.py,sha256=HHkqd64XvYN9_3jeNF-y4GllkpLskn35iM2p3sZj5PU,2847
 singlestoredb/management/job.py,sha256=Npfe1JLYJlggGBrXLniPKwKUKF1i3alvSY1SFtvauSs,25498
-singlestoredb/management/manager.py,sha256=
+singlestoredb/management/manager.py,sha256=jQJi9Lz4eTmqOpQMW0KHz-uMrk404OfXr7F_tCnYQ5M,11837
 singlestoredb/management/organization.py,sha256=viFG8eLVOs-NeoL6zm8nypFRQ-oiRDD2Sk-bL2b6hvw,6095
 singlestoredb/management/region.py,sha256=4c4z6ETYrSIK3wm2UA4Wr2Td1UgoechN0l1-mqy5bvQ,4283
-singlestoredb/management/utils.py,sha256=
-singlestoredb/management/workspace.py,sha256=
+singlestoredb/management/utils.py,sha256=ls8sgv-VekUzuoO3oXR8iLShdSudktuuD8dj0lgcDtM,13635
+singlestoredb/management/workspace.py,sha256=bq_aqLUwELtE5pkh2oAV2vwfyT1H89wt-6wgAH8nFl4,64106
 singlestoredb/mysql/__init__.py,sha256=CbpwzNUJPAmKPpIobC0-ugBta_RgHCMq7X7N75QLReY,4669
 singlestoredb/mysql/_auth.py,sha256=YaqqyvAHmeraBv3BM207rNveUVPM-mPnW20ts_ynVWg,8341
 singlestoredb/mysql/charset.py,sha256=mnCdMpvdub1S2mm2PSk2j5JddgsWRjsVLtGx-y9TskE,10724
-singlestoredb/mysql/connection.py,sha256=
+singlestoredb/mysql/connection.py,sha256=poDB0QyxZT2NR6l0n3P5ZwFlEBr2HTDapWHdM1Y1Aaw,75396
 singlestoredb/mysql/converters.py,sha256=vebFFm6IrC0WgY-5Eh-esaPizY5cq3vDOUlEKGaYM-U,7771
 singlestoredb/mysql/cursors.py,sha256=YoZU5_weniqXcoeA0GVSxmetkPYooiDkXMbVBYUNlrU,27942
 singlestoredb/mysql/err.py,sha256=aDbmfq08gWVmfgIea735wSeiFdvYbB5wusgd3qTVq1s,2480
@@ -154,7 +154,7 @@ singlestoredb/tests/test_ext_func.py,sha256=YidPnlO7HWsVIbPwdCa33Oo8SyGkP2_Pcuj_
 singlestoredb/tests/test_ext_func_data.py,sha256=LeQoV5QQkSJ7WVOKZw_F5zzIFXXrGputh3k_lDDKlG4,48616
 singlestoredb/tests/test_fusion.py,sha256=XT5rhYx32mndcZGaW2Xc7DTLMLEcf_vO3w1Dxss9nMM,52120
 singlestoredb/tests/test_http.py,sha256=7hwXe61hlUes3nji0MTTZweo94tJAlJ-vA5ct9geXFQ,8868
-singlestoredb/tests/test_management.py,sha256=
+singlestoredb/tests/test_management.py,sha256=UOsokfCXL_emiHBgq_Bt6qHOfwhUTMcXrRdY9JNsL5c,55112
 singlestoredb/tests/test_plugin.py,sha256=P1nXLnTafaHkHN-6bVbGryxTu7OWJPU9SYFZ_WQUwq8,845
 singlestoredb/tests/test_results.py,sha256=Zg1ynZFRZqalAMfNLOU5C6BDXaox6JxrKm_XZwVNFcg,6753
 singlestoredb/tests/test_types.py,sha256=YeVE6KPqlqzJke-4hbRmc8ko1E7RLHu5S8qLg04Bl5Y,4632
@@ -165,19 +165,19 @@ singlestoredb/tests/test_xdict.py,sha256=5ArRJqd5aNXkPK7Y6sFeRbqZ59MZ1YaGBpSlDAb
 singlestoredb/tests/utils.py,sha256=WR8GFNiC0lU4tz21Y3rlbbp9Gz9WcSwp2jpUSCj7RFU,5136
 singlestoredb/tests/ext_funcs/__init__.py,sha256=6gPTR_cRvAUjv0gX58pL2CrerzqMsOyv9y351jeR3es,16283
 singlestoredb/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-singlestoredb/utils/config.py,sha256=
+singlestoredb/utils/config.py,sha256=wiRdzMjMLzhzueDK9kgdeaxk26lGR8-m8Z0o_BtHlHs,25471
 singlestoredb/utils/convert_rows.py,sha256=gkZeZazeJvimCYEQ1FdAC-AmMDwmFGCuP6mi653bpns,1885
 singlestoredb/utils/debug.py,sha256=y7dnJeJGt3U_BWXz9pLt1qNQREpPtumYX_sk1DiqG6Y,362
 singlestoredb/utils/dtypes.py,sha256=_P2fTX2Fgv9Bcl-2L6KivhWgLzyu91sDamxVnmG92Mw,6103
 singlestoredb/utils/events.py,sha256=Wpp4Z5kw6f7axGAerMirEhgjcAArboQtMc4aqXzfKIc,1519
-singlestoredb/utils/mogrify.py,sha256=
-singlestoredb/utils/results.py,sha256=
-singlestoredb/utils/xdict.py,sha256
+singlestoredb/utils/mogrify.py,sha256=lkJ8Vpt9broAYnaUOgwEo72puzqnMN0t8Vobz3ZLzjY,4210
+singlestoredb/utils/results.py,sha256=QS6yNaeBesrUNBHMcWlhSIA92MJUorKmwXncrvct21w,15890
+singlestoredb/utils/xdict.py,sha256=XV5U47yeaXst8lFLugHTIWwSC1WvgzxMIvwEuyYAHbs,13357
+singlestoredb-1.16.0.dist-info/licenses/LICENSE,sha256=Bojenzui8aPNjlF3w4ojguDP7sTf8vFV_9Gc2UAG1sg,11542
 sqlx/__init__.py,sha256=4Sdn8HN-Hf8v0_wCt60DCckCg8BvgM3-9r4YVfZycRE,89
 sqlx/magic.py,sha256=6VBlotgjautjev599tHaTYOfcfOA9m6gV_-P1_Qc4lI,3622
-singlestoredb-1.
-singlestoredb-1.
-singlestoredb-1.
-singlestoredb-1.
-singlestoredb-1.
-singlestoredb-1.15.8.dist-info/RECORD,,
+singlestoredb-1.16.0.dist-info/METADATA,sha256=wL7ZSLpknWW-3xkecLpjUBsvvpsICj-dk8n_r1_hPis,6798
+singlestoredb-1.16.0.dist-info/WHEEL,sha256=-bFk29V6nDuVSN89fTfRJ2-J9pkqt9vKB2gUxbkZGc4,95
+singlestoredb-1.16.0.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
+singlestoredb-1.16.0.dist-info/top_level.txt,sha256=Snoa-71Carn2WopDKin_NCwprZQdMy3UIamcZi7-0NI,51
+singlestoredb-1.16.0.dist-info/RECORD,,
{singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info}/entry_points.txt
File without changes
{singlestoredb-1.15.8.dist-info → singlestoredb-1.16.0.dist-info/licenses}/LICENSE
File without changes