singlestoredb 1.16.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- singlestoredb/__init__.py +75 -0
- singlestoredb/ai/__init__.py +2 -0
- singlestoredb/ai/chat.py +139 -0
- singlestoredb/ai/embeddings.py +128 -0
- singlestoredb/alchemy/__init__.py +90 -0
- singlestoredb/apps/__init__.py +3 -0
- singlestoredb/apps/_cloud_functions.py +90 -0
- singlestoredb/apps/_config.py +72 -0
- singlestoredb/apps/_connection_info.py +18 -0
- singlestoredb/apps/_dashboards.py +47 -0
- singlestoredb/apps/_process.py +32 -0
- singlestoredb/apps/_python_udfs.py +100 -0
- singlestoredb/apps/_stdout_supress.py +30 -0
- singlestoredb/apps/_uvicorn_util.py +36 -0
- singlestoredb/auth.py +245 -0
- singlestoredb/config.py +484 -0
- singlestoredb/connection.py +1487 -0
- singlestoredb/converters.py +950 -0
- singlestoredb/docstring/__init__.py +33 -0
- singlestoredb/docstring/attrdoc.py +126 -0
- singlestoredb/docstring/common.py +230 -0
- singlestoredb/docstring/epydoc.py +267 -0
- singlestoredb/docstring/google.py +412 -0
- singlestoredb/docstring/numpydoc.py +562 -0
- singlestoredb/docstring/parser.py +100 -0
- singlestoredb/docstring/py.typed +1 -0
- singlestoredb/docstring/rest.py +256 -0
- singlestoredb/docstring/tests/__init__.py +1 -0
- singlestoredb/docstring/tests/_pydoctor.py +21 -0
- singlestoredb/docstring/tests/test_epydoc.py +729 -0
- singlestoredb/docstring/tests/test_google.py +1007 -0
- singlestoredb/docstring/tests/test_numpydoc.py +1100 -0
- singlestoredb/docstring/tests/test_parse_from_object.py +109 -0
- singlestoredb/docstring/tests/test_parser.py +248 -0
- singlestoredb/docstring/tests/test_rest.py +547 -0
- singlestoredb/docstring/tests/test_util.py +70 -0
- singlestoredb/docstring/util.py +141 -0
- singlestoredb/exceptions.py +120 -0
- singlestoredb/functions/__init__.py +16 -0
- singlestoredb/functions/decorator.py +201 -0
- singlestoredb/functions/dtypes.py +1793 -0
- singlestoredb/functions/ext/__init__.py +1 -0
- singlestoredb/functions/ext/arrow.py +375 -0
- singlestoredb/functions/ext/asgi.py +2133 -0
- singlestoredb/functions/ext/json.py +420 -0
- singlestoredb/functions/ext/mmap.py +413 -0
- singlestoredb/functions/ext/rowdat_1.py +724 -0
- singlestoredb/functions/ext/timer.py +89 -0
- singlestoredb/functions/ext/utils.py +218 -0
- singlestoredb/functions/signature.py +1578 -0
- singlestoredb/functions/typing/__init__.py +41 -0
- singlestoredb/functions/typing/numpy.py +20 -0
- singlestoredb/functions/typing/pandas.py +2 -0
- singlestoredb/functions/typing/polars.py +2 -0
- singlestoredb/functions/typing/pyarrow.py +2 -0
- singlestoredb/functions/utils.py +421 -0
- singlestoredb/fusion/__init__.py +11 -0
- singlestoredb/fusion/graphql.py +213 -0
- singlestoredb/fusion/handler.py +916 -0
- singlestoredb/fusion/handlers/__init__.py +0 -0
- singlestoredb/fusion/handlers/export.py +525 -0
- singlestoredb/fusion/handlers/files.py +690 -0
- singlestoredb/fusion/handlers/job.py +660 -0
- singlestoredb/fusion/handlers/models.py +250 -0
- singlestoredb/fusion/handlers/stage.py +502 -0
- singlestoredb/fusion/handlers/utils.py +324 -0
- singlestoredb/fusion/handlers/workspace.py +956 -0
- singlestoredb/fusion/registry.py +249 -0
- singlestoredb/fusion/result.py +399 -0
- singlestoredb/http/__init__.py +27 -0
- singlestoredb/http/connection.py +1267 -0
- singlestoredb/magics/__init__.py +34 -0
- singlestoredb/magics/run_personal.py +137 -0
- singlestoredb/magics/run_shared.py +134 -0
- singlestoredb/management/__init__.py +9 -0
- singlestoredb/management/billing_usage.py +148 -0
- singlestoredb/management/cluster.py +462 -0
- singlestoredb/management/export.py +295 -0
- singlestoredb/management/files.py +1102 -0
- singlestoredb/management/inference_api.py +105 -0
- singlestoredb/management/job.py +887 -0
- singlestoredb/management/manager.py +373 -0
- singlestoredb/management/organization.py +226 -0
- singlestoredb/management/region.py +169 -0
- singlestoredb/management/utils.py +423 -0
- singlestoredb/management/workspace.py +1927 -0
- singlestoredb/mysql/__init__.py +177 -0
- singlestoredb/mysql/_auth.py +298 -0
- singlestoredb/mysql/charset.py +214 -0
- singlestoredb/mysql/connection.py +2032 -0
- singlestoredb/mysql/constants/CLIENT.py +38 -0
- singlestoredb/mysql/constants/COMMAND.py +32 -0
- singlestoredb/mysql/constants/CR.py +78 -0
- singlestoredb/mysql/constants/ER.py +474 -0
- singlestoredb/mysql/constants/EXTENDED_TYPE.py +3 -0
- singlestoredb/mysql/constants/FIELD_TYPE.py +48 -0
- singlestoredb/mysql/constants/FLAG.py +15 -0
- singlestoredb/mysql/constants/SERVER_STATUS.py +10 -0
- singlestoredb/mysql/constants/VECTOR_TYPE.py +6 -0
- singlestoredb/mysql/constants/__init__.py +0 -0
- singlestoredb/mysql/converters.py +271 -0
- singlestoredb/mysql/cursors.py +896 -0
- singlestoredb/mysql/err.py +92 -0
- singlestoredb/mysql/optionfile.py +20 -0
- singlestoredb/mysql/protocol.py +450 -0
- singlestoredb/mysql/tests/__init__.py +19 -0
- singlestoredb/mysql/tests/base.py +126 -0
- singlestoredb/mysql/tests/conftest.py +37 -0
- singlestoredb/mysql/tests/test_DictCursor.py +132 -0
- singlestoredb/mysql/tests/test_SSCursor.py +141 -0
- singlestoredb/mysql/tests/test_basic.py +452 -0
- singlestoredb/mysql/tests/test_connection.py +851 -0
- singlestoredb/mysql/tests/test_converters.py +58 -0
- singlestoredb/mysql/tests/test_cursor.py +141 -0
- singlestoredb/mysql/tests/test_err.py +16 -0
- singlestoredb/mysql/tests/test_issues.py +514 -0
- singlestoredb/mysql/tests/test_load_local.py +75 -0
- singlestoredb/mysql/tests/test_nextset.py +88 -0
- singlestoredb/mysql/tests/test_optionfile.py +27 -0
- singlestoredb/mysql/tests/thirdparty/__init__.py +6 -0
- singlestoredb/mysql/tests/thirdparty/test_MySQLdb/__init__.py +9 -0
- singlestoredb/mysql/tests/thirdparty/test_MySQLdb/capabilities.py +323 -0
- singlestoredb/mysql/tests/thirdparty/test_MySQLdb/dbapi20.py +865 -0
- singlestoredb/mysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_capabilities.py +110 -0
- singlestoredb/mysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_dbapi20.py +224 -0
- singlestoredb/mysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_nonstandard.py +101 -0
- singlestoredb/mysql/times.py +23 -0
- singlestoredb/notebook/__init__.py +16 -0
- singlestoredb/notebook/_objects.py +213 -0
- singlestoredb/notebook/_portal.py +352 -0
- singlestoredb/py.typed +0 -0
- singlestoredb/pytest.py +352 -0
- singlestoredb/server/__init__.py +0 -0
- singlestoredb/server/docker.py +452 -0
- singlestoredb/server/free_tier.py +267 -0
- singlestoredb/tests/__init__.py +0 -0
- singlestoredb/tests/alltypes.sql +307 -0
- singlestoredb/tests/alltypes_no_nulls.sql +208 -0
- singlestoredb/tests/empty.sql +0 -0
- singlestoredb/tests/ext_funcs/__init__.py +702 -0
- singlestoredb/tests/local_infile.csv +3 -0
- singlestoredb/tests/test.ipynb +18 -0
- singlestoredb/tests/test.sql +680 -0
- singlestoredb/tests/test2.ipynb +18 -0
- singlestoredb/tests/test2.sql +1 -0
- singlestoredb/tests/test_basics.py +1332 -0
- singlestoredb/tests/test_config.py +318 -0
- singlestoredb/tests/test_connection.py +3103 -0
- singlestoredb/tests/test_dbapi.py +27 -0
- singlestoredb/tests/test_exceptions.py +45 -0
- singlestoredb/tests/test_ext_func.py +1472 -0
- singlestoredb/tests/test_ext_func_data.py +1101 -0
- singlestoredb/tests/test_fusion.py +1527 -0
- singlestoredb/tests/test_http.py +288 -0
- singlestoredb/tests/test_management.py +1599 -0
- singlestoredb/tests/test_plugin.py +33 -0
- singlestoredb/tests/test_results.py +171 -0
- singlestoredb/tests/test_types.py +132 -0
- singlestoredb/tests/test_udf.py +737 -0
- singlestoredb/tests/test_udf_returns.py +459 -0
- singlestoredb/tests/test_vectorstore.py +51 -0
- singlestoredb/tests/test_xdict.py +333 -0
- singlestoredb/tests/utils.py +141 -0
- singlestoredb/types.py +373 -0
- singlestoredb/utils/__init__.py +0 -0
- singlestoredb/utils/config.py +950 -0
- singlestoredb/utils/convert_rows.py +69 -0
- singlestoredb/utils/debug.py +13 -0
- singlestoredb/utils/dtypes.py +205 -0
- singlestoredb/utils/events.py +65 -0
- singlestoredb/utils/mogrify.py +151 -0
- singlestoredb/utils/results.py +585 -0
- singlestoredb/utils/xdict.py +425 -0
- singlestoredb/vectorstore.py +192 -0
- singlestoredb/warnings.py +5 -0
- singlestoredb-1.16.1.dist-info/METADATA +165 -0
- singlestoredb-1.16.1.dist-info/RECORD +183 -0
- singlestoredb-1.16.1.dist-info/WHEEL +5 -0
- singlestoredb-1.16.1.dist-info/entry_points.txt +2 -0
- singlestoredb-1.16.1.dist-info/licenses/LICENSE +201 -0
- singlestoredb-1.16.1.dist-info/top_level.txt +3 -0
- sqlx/__init__.py +4 -0
- sqlx/magic.py +113 -0
|
@@ -0,0 +1,1927 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""SingleStoreDB Workspace Management."""
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import datetime
|
|
6
|
+
import glob
|
|
7
|
+
import io
|
|
8
|
+
import os
|
|
9
|
+
import re
|
|
10
|
+
import time
|
|
11
|
+
from collections.abc import Mapping
|
|
12
|
+
from typing import Any
|
|
13
|
+
from typing import Dict
|
|
14
|
+
from typing import List
|
|
15
|
+
from typing import Optional
|
|
16
|
+
from typing import Union
|
|
17
|
+
|
|
18
|
+
from .. import config
|
|
19
|
+
from .. import connection
|
|
20
|
+
from ..exceptions import ManagementError
|
|
21
|
+
from .billing_usage import BillingUsageItem
|
|
22
|
+
from .files import FileLocation
|
|
23
|
+
from .files import FilesObject
|
|
24
|
+
from .files import FilesObjectBytesReader
|
|
25
|
+
from .files import FilesObjectBytesWriter
|
|
26
|
+
from .files import FilesObjectTextReader
|
|
27
|
+
from .files import FilesObjectTextWriter
|
|
28
|
+
from .manager import Manager
|
|
29
|
+
from .organization import Organization
|
|
30
|
+
from .region import Region
|
|
31
|
+
from .utils import camel_to_snake_dict
|
|
32
|
+
from .utils import from_datetime
|
|
33
|
+
from .utils import NamedList
|
|
34
|
+
from .utils import PathLike
|
|
35
|
+
from .utils import snake_to_camel
|
|
36
|
+
from .utils import snake_to_camel_dict
|
|
37
|
+
from .utils import to_datetime
|
|
38
|
+
from .utils import ttl_property
|
|
39
|
+
from .utils import vars_to_str
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def get_organization() -> Organization:
    """Return the organization for the active workspace manager."""
    mgr = manage_workspaces()
    return mgr.organization
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def get_secret(name: str) -> Optional[str]:
    """Look up the named secret in the organization and return its value."""
    org = get_organization()
    return org.get_secret(name).value
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def get_workspace_group(
    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
) -> WorkspaceGroup:
    """
    Resolve a workspace group.

    Fixed: the docstring previously read "Get the stage for the workspace
    group." — copied from ``get_stage`` — but this function resolves and
    returns the workspace group itself.

    Parameters
    ----------
    workspace_group : WorkspaceGroup or str, optional
        A ``WorkspaceGroup`` (returned as-is) or a key used to look one
        up via the workspace manager. If not supplied, the
        ``SINGLESTOREDB_WORKSPACE_GROUP`` environment variable is used.

    Returns
    -------
    WorkspaceGroup

    Raises
    ------
    RuntimeError
        If no workspace group was specified by parameter or environment.

    """
    if isinstance(workspace_group, WorkspaceGroup):
        return workspace_group
    elif workspace_group:
        return manage_workspaces().workspace_groups[workspace_group]
    elif 'SINGLESTOREDB_WORKSPACE_GROUP' in os.environ:
        return manage_workspaces().workspace_groups[
            os.environ['SINGLESTOREDB_WORKSPACE_GROUP']
        ]
    raise RuntimeError('no workspace group specified')
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def get_stage(
    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
) -> Stage:
    """Return the stage attached to the given (or default) workspace group."""
    group = get_workspace_group(workspace_group)
    return group.stage
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def get_workspace(
    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
    workspace: Optional[Union[Workspace, str]] = None,
) -> Workspace:
    """
    Get a workspace from a workspace group.

    Parameters
    ----------
    workspace_group : WorkspaceGroup or str, optional
        Group containing the workspace; resolved by
        :func:`get_workspace_group` (which falls back to the
        ``SINGLESTOREDB_WORKSPACE_GROUP`` environment variable).
    workspace : Workspace or str, optional
        A ``Workspace`` (returned as-is) or a key used to look one up in
        the group. If not supplied, the ``SINGLESTOREDB_WORKSPACE``
        environment variable is used.

    Returns
    -------
    Workspace

    Raises
    ------
    RuntimeError
        If no workspace was specified by parameter or environment.

    """
    if isinstance(workspace, Workspace):
        return workspace
    wg = get_workspace_group(workspace_group)
    if workspace:
        return wg.workspaces[workspace]
    elif 'SINGLESTOREDB_WORKSPACE' in os.environ:
        return wg.workspaces[
            os.environ['SINGLESTOREDB_WORKSPACE']
        ]
    # Fixed: this message previously read 'no workspace group specified',
    # but the group has already been resolved above — it is the workspace
    # that is missing here.
    raise RuntimeError('no workspace specified')
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class Stage(FileLocation):
    """
    Stage manager.

    This object is not instantiated directly.
    It is returned by ``WorkspaceGroup.stage`` or ``StarterWorkspace.stage``.

    """

    def __init__(self, deployment_id: str, manager: WorkspaceManager):
        # ID of the deployment (workspace group / starter workspace) that owns
        # this Stage; every REST path below is rooted at
        # ``stage/<deployment_id>/fs/``.
        self._deployment_id = deployment_id
        # Manager providing the low-level HTTP verbs (_get / _put / _patch /
        # _delete) used for all Stage operations.
        self._manager = manager

    def open(
        self,
        stage_path: PathLike,
        mode: str = 'r',
        encoding: Optional[str] = None,
    ) -> Union[io.StringIO, io.BytesIO]:
        """
        Open a Stage path for reading or writing.

        Parameters
        ----------
        stage_path : Path or str
            The stage path to read / write
        mode : str, optional
            The read / write mode. The following modes are supported:
            * 'r' open for reading (default)
            * 'w' open for writing, truncating the file first
            * 'x' create a new file and open it for writing
            The data type can be specified by adding one of the following:
            * 'b' binary mode
            * 't' text mode (default)
        encoding : str, optional
            The string encoding to use for text

        Returns
        -------
        FilesObjectBytesReader - 'rb' or 'b' mode
        FilesObjectBytesWriter - 'wb' or 'xb' mode
        FilesObjectTextReader - 'r' or 'rt' mode
        FilesObjectTextWriter - 'w', 'x', 'wt' or 'xt' mode

        """
        # In-place modification ('+') and append ('a') are not supported by
        # the Stage API; files can only be created, replaced, or read whole.
        if '+' in mode or 'a' in mode:
            raise ValueError('modifying an existing stage file is not supported')

        if 'w' in mode or 'x' in mode:
            exists = self.exists(stage_path)
            if exists:
                if 'x' in mode:
                    # 'x' is exclusive creation, mirroring builtin open().
                    raise FileExistsError(f'stage path already exists: {stage_path}')
                # 'w' truncates: delete the old file so the writer replaces it.
                self.remove(stage_path)
            if 'b' in mode:
                return FilesObjectBytesWriter(b'', self, stage_path)
            # NOTE(review): ``encoding`` is not passed on the write path —
            # TODO confirm whether FilesObjectTextWriter should receive it.
            return FilesObjectTextWriter('', self, stage_path)

        if 'r' in mode:
            # The entire file is downloaded up front; the returned reader is
            # an in-memory view over that content.
            content = self.download_file(stage_path)
            if isinstance(content, bytes):
                if 'b' in mode:
                    return FilesObjectBytesReader(content)
                encoding = 'utf-8' if encoding is None else encoding
                return FilesObjectTextReader(content.decode(encoding))

            if isinstance(content, str):
                return FilesObjectTextReader(content)

            raise ValueError(f'unrecognized file content type: {type(content)}')

        raise ValueError(f'must have one of create/read/write mode specified: {mode}')

    def upload_file(
        self,
        local_path: Union[PathLike, io.IOBase],
        stage_path: PathLike,
        *,
        overwrite: bool = False,
    ) -> FilesObject:
        """
        Upload a local file.

        Parameters
        ----------
        local_path : Path or str or file-like
            Path to the local file or an open file object
        stage_path : Path or str
            Path to the stage file
        overwrite : bool, optional
            Should the ``stage_path`` be overwritten if it exists already?

        Returns
        -------
        FilesObject

        """
        if isinstance(local_path, io.IOBase):
            # Already an open stream; nothing on disk to validate.
            pass
        elif not os.path.isfile(local_path):
            raise IsADirectoryError(f'local path is not a file: {local_path}')

        if self.exists(stage_path):
            if not overwrite:
                raise OSError(f'stage path already exists: {stage_path}')

            self.remove(stage_path)

        if isinstance(local_path, io.IOBase):
            return self._upload(local_path, stage_path, overwrite=overwrite)

        # NOTE(review): the handle opened here is passed to _upload and never
        # explicitly closed — relies on GC; consider a context manager.
        return self._upload(open(local_path, 'rb'), stage_path, overwrite=overwrite)

    def upload_folder(
        self,
        local_path: PathLike,
        stage_path: PathLike,
        *,
        overwrite: bool = False,
        recursive: bool = True,
        include_root: bool = False,
        ignore: Optional[Union[PathLike, List[PathLike]]] = None,
    ) -> FilesObject:
        """
        Upload a folder recursively.

        Only the contents of the folder are uploaded. To include the
        folder name itself in the target path use ``include_root=True``.

        Parameters
        ----------
        local_path : Path or str
            Local directory to upload
        stage_path : Path or str
            Path of stage folder to upload to
        overwrite : bool, optional
            If a file already exists, should it be overwritten?
        recursive : bool, optional
            Should nested folders be uploaded?
        include_root : bool, optional
            Should the local root folder itself be uploaded as the top folder?
        ignore : Path or str or List[Path] or List[str], optional
            Glob patterns of files to ignore, for example, ``**/*.pyc`` will
            ignore all ``*.pyc`` files in the directory tree

        """
        if not os.path.isdir(local_path):
            raise NotADirectoryError(f'local path is not a directory: {local_path}')
        if self.exists(stage_path) and not self.is_dir(stage_path):
            raise NotADirectoryError(f'stage path is not a directory: {stage_path}')

        # Expand the ignore patterns once into a set of concrete local paths.
        ignore_files = set()
        if ignore:
            if isinstance(ignore, list):
                for item in ignore:
                    ignore_files.update(glob.glob(str(item), recursive=recursive))
            else:
                ignore_files.update(glob.glob(str(ignore), recursive=recursive))

        # NOTE(review): ``parent_dir`` is the basename of the *current working
        # directory*, not of ``local_path`` — TODO confirm this is the intended
        # root when ``include_root=True``.
        parent_dir = os.path.basename(os.getcwd())

        files = glob.glob(os.path.join(local_path, '**'), recursive=recursive)

        for src in files:
            if ignore_files and src in ignore_files:
                continue
            # NOTE(review): the target path is derived from the local ``src``
            # path and ``stage_path`` is not used here — verify targets land
            # under the requested stage folder.
            target = os.path.join(parent_dir, src) if include_root else src
            self.upload_file(src, target, overwrite=overwrite)

        return self.info(stage_path)

    def _upload(
        self,
        content: Union[str, bytes, io.IOBase],
        stage_path: PathLike,
        *,
        overwrite: bool = False,
    ) -> FilesObject:
        """
        Upload content to a stage file.

        Parameters
        ----------
        content : str or bytes or file-like
            Content to upload to stage
        stage_path : Path or str
            Path to the stage file
        overwrite : bool, optional
            Should the ``stage_path`` be overwritten if it exists already?

        """
        if self.exists(stage_path):
            if not overwrite:
                raise OSError(f'stage path already exists: {stage_path}')
            self.remove(stage_path)

        # Multipart file upload; Content-Type is cleared so the HTTP layer
        # sets the multipart boundary itself.
        self._manager._put(
            f'stage/{self._deployment_id}/fs/{stage_path}',
            files={'file': content},
            headers={'Content-Type': None},
        )

        return self.info(stage_path)

    def mkdir(self, stage_path: PathLike, overwrite: bool = False) -> FilesObject:
        """
        Make a directory in the stage.

        Parameters
        ----------
        stage_path : Path or str
            Path of the folder to create
        overwrite : bool, optional
            Should the stage path be overwritten if it exists already?

        Returns
        -------
        FilesObject

        """
        # Directories are addressed with exactly one trailing '/'.
        stage_path = re.sub(r'/*$', r'', str(stage_path)) + '/'

        if self.exists(stage_path):
            if not overwrite:
                # Existing directory and no overwrite requested: no-op.
                return self.info(stage_path)

            self.remove(stage_path)

        self._manager._put(
            f'stage/{self._deployment_id}/fs/{stage_path}?isFile=false',
        )

        return self.info(stage_path)

    # Alias: directory creation is a single API call, so mkdir already
    # behaves like a recursive mkdirs.
    mkdirs = mkdir

    def rename(
        self,
        old_path: PathLike,
        new_path: PathLike,
        *,
        overwrite: bool = False,
    ) -> FilesObject:
        """
        Move the stage file to a new location.

        Parameters
        ----------
        old_path : Path or str
            Original location of the path
        new_path : Path or str
            New location of the path
        overwrite : bool, optional
            Should the ``new_path`` be overwritten if it exists already?

        """
        if not self.exists(old_path):
            raise OSError(f'stage path does not exist: {old_path}')

        if self.exists(new_path):
            if not overwrite:
                raise OSError(f'stage path already exists: {new_path}')

            # A trailing '/' marks a directory; a file may not replace a
            # directory (or vice versa).
            if str(old_path).endswith('/') and not str(new_path).endswith('/'):
                raise OSError('original and new paths are not the same type')

            if str(new_path).endswith('/'):
                self.removedirs(new_path)
            else:
                self.remove(new_path)

        self._manager._patch(
            f'stage/{self._deployment_id}/fs/{old_path}',
            json=dict(newPath=new_path),
        )

        return self.info(new_path)

    def info(self, stage_path: PathLike) -> FilesObject:
        """
        Return information about a stage location.

        Parameters
        ----------
        stage_path : Path or str
            Path to the stage location

        Returns
        -------
        FilesObject

        """
        # Collapse any run of trailing slashes to one, and request metadata
        # only (no file content).
        res = self._manager._get(
            re.sub(r'/+$', r'/', f'stage/{self._deployment_id}/fs/{stage_path}'),
            params=dict(metadata=1),
        ).json()

        return FilesObject.from_dict(res, self)

    def exists(self, stage_path: PathLike) -> bool:
        """
        Does the given stage path exist?

        Parameters
        ----------
        stage_path : Path or str
            Path to stage object

        Returns
        -------
        bool

        """
        try:
            self.info(stage_path)
            return True
        except ManagementError as exc:
            # 404 from the API means the path does not exist; anything else
            # is a real error.
            if exc.errno == 404:
                return False
            raise

    def is_dir(self, stage_path: PathLike) -> bool:
        """
        Is the given stage path a directory?

        Parameters
        ----------
        stage_path : Path or str
            Path to stage object

        Returns
        -------
        bool

        """
        try:
            return self.info(stage_path).type == 'directory'
        except ManagementError as exc:
            # Nonexistent paths are simply "not a directory".
            if exc.errno == 404:
                return False
            raise

    def is_file(self, stage_path: PathLike) -> bool:
        """
        Is the given stage path a file?

        Parameters
        ----------
        stage_path : Path or str
            Path to stage object

        Returns
        -------
        bool

        """
        try:
            # Anything that exists and is not a directory counts as a file.
            return self.info(stage_path).type != 'directory'
        except ManagementError as exc:
            if exc.errno == 404:
                return False
            raise

    def _listdir(self, stage_path: PathLike, *, recursive: bool = False) -> List[str]:
        """
        Return the names of files in a directory.

        Parameters
        ----------
        stage_path : Path or str
            Path to the folder in Stage
        recursive : bool, optional
            Should folders be listed recursively?

        """
        res = self._manager._get(
            re.sub(r'/+$', r'/', f'stage/{self._deployment_id}/fs/{stage_path}'),
        ).json()
        if recursive:
            out = []
            # Depth-first: append each entry, then recurse into directories.
            for item in res['content'] or []:
                out.append(item['path'])
                if item['type'] == 'directory':
                    out.extend(self._listdir(item['path'], recursive=recursive))
            return out
        return [x['path'] for x in res['content'] or []]

    def listdir(
        self,
        stage_path: PathLike = '/',
        *,
        recursive: bool = False,
    ) -> List[str]:
        """
        List the files / folders at the given path.

        Parameters
        ----------
        stage_path : Path or str, optional
            Path to the stage location
        recursive : bool, optional
            Should folders be listed recursively?

        Returns
        -------
        List[str]

        """
        # Normalize to directory form: drop leading './' / '/' and guarantee
        # exactly one trailing '/'.
        stage_path = re.sub(r'^(\./|/)+', r'', str(stage_path))
        stage_path = re.sub(r'/+$', r'', stage_path) + '/'

        if self.is_dir(stage_path):
            out = self._listdir(stage_path, recursive=recursive)
            # Strip the leading path components so results are relative to
            # ``stage_path`` rather than the stage root.
            if stage_path != '/':
                stage_path_n = len(stage_path.split('/')) - 1
                out = ['/'.join(x.split('/')[stage_path_n:]) for x in out]
            return out

        raise NotADirectoryError(f'stage path is not a directory: {stage_path}')

    def download_file(
        self,
        stage_path: PathLike,
        local_path: Optional[PathLike] = None,
        *,
        overwrite: bool = False,
        encoding: Optional[str] = None,
    ) -> Optional[Union[bytes, str]]:
        """
        Download the content of a stage path.

        Parameters
        ----------
        stage_path : Path or str
            Path to the stage file
        local_path : Path or str
            Path to local file target location
        overwrite : bool, optional
            Should an existing file be overwritten if it exists?
        encoding : str, optional
            Encoding used to convert the resulting data

        Returns
        -------
        bytes or str - ``local_path`` is None
        None - ``local_path`` is a Path or str

        """
        if local_path is not None and not overwrite and os.path.exists(local_path):
            raise OSError('target file already exists; use overwrite=True to replace')
        if self.is_dir(stage_path):
            raise IsADirectoryError(f'stage path is a directory: {stage_path}')

        out = self._manager._get(
            f'stage/{self._deployment_id}/fs/{stage_path}',
        ).content

        if local_path is not None:
            # Save to disk instead of returning the content.
            with open(local_path, 'wb') as outfile:
                outfile.write(out)
            return None

        if encoding:
            return out.decode(encoding)

        return out

    def download_folder(
        self,
        stage_path: PathLike,
        local_path: PathLike = '.',
        *,
        overwrite: bool = False,
    ) -> None:
        """
        Download a Stage folder to a local directory.

        Parameters
        ----------
        stage_path : Path or str
            Path to the stage file
        local_path : Path or str
            Path to local directory target location
        overwrite : bool, optional
            Should an existing directory / files be overwritten if they exist?

        """
        if local_path is not None and not overwrite and os.path.exists(local_path):
            raise OSError(
                'target directory already exists; '
                'use overwrite=True to replace',
            )
        if not self.is_dir(stage_path):
            raise NotADirectoryError(f'stage path is not a directory: {stage_path}')

        # NOTE(review): listdir returns paths relative to ``stage_path``, but
        # is_dir / download_file below resolve them against the stage root —
        # verify behavior for non-root ``stage_path`` values.
        for f in self.listdir(stage_path, recursive=True):
            if self.is_dir(f):
                continue
            target = os.path.normpath(os.path.join(local_path, f))
            os.makedirs(os.path.dirname(target), exist_ok=True)
            self.download_file(f, target, overwrite=overwrite)

    def remove(self, stage_path: PathLike) -> None:
        """
        Delete a stage location.

        Parameters
        ----------
        stage_path : Path or str
            Path to the stage location

        """
        if self.is_dir(stage_path):
            raise IsADirectoryError(
                'stage path is a directory, '
                f'use rmdir or removedirs: {stage_path}',
            )

        self._manager._delete(f'stage/{self._deployment_id}/fs/{stage_path}')

    def removedirs(self, stage_path: PathLike) -> None:
        """
        Delete a stage folder recursively.

        Parameters
        ----------
        stage_path : Path or str
            Path to the stage location

        """
        # Normalize to directory form (single trailing '/') before deleting.
        stage_path = re.sub(r'/*$', r'', str(stage_path)) + '/'
        self._manager._delete(f'stage/{self._deployment_id}/fs/{stage_path}')

    def rmdir(self, stage_path: PathLike) -> None:
        """
        Delete a stage folder.

        Parameters
        ----------
        stage_path : Path or str
            Path to the stage location

        """
        stage_path = re.sub(r'/*$', r'', str(stage_path)) + '/'

        # rmdir only removes empty folders, mirroring os.rmdir.
        if self.listdir(stage_path):
            raise OSError(f'stage folder is not empty, use removedirs: {stage_path}')

        self._manager._delete(f'stage/{self._deployment_id}/fs/{stage_path}')

    def __str__(self) -> str:
        """Return string representation."""
        return vars_to_str(self)

    def __repr__(self) -> str:
        """Return string representation."""
        return str(self)
|
|
642
|
+
|
|
643
|
+
|
|
644
|
+
# ``StageObject`` predates the generic ``FilesObject`` class; keep the old
# name importable for backward compatibility.
StageObject = FilesObject  # alias for backward compatibility
|
|
645
|
+
|
|
646
|
+
|
|
647
|
+
class Workspace(object):
    """
    SingleStoreDB workspace definition.

    This object is not instantiated directly. It is used in the results
    of API calls on the :class:`WorkspaceManager`. Workspaces are created using
    :meth:`WorkspaceManager.create_workspace`, or existing workspaces are
    accessed by either :attr:`WorkspaceManager.workspaces` or by calling
    :meth:`WorkspaceManager.get_workspace`.

    See Also
    --------
    :meth:`WorkspaceManager.create_workspace`
    :meth:`WorkspaceManager.get_workspace`
    :attr:`WorkspaceManager.workspaces`

    """

    name: str
    id: str
    group_id: str
    size: str
    state: str
    created_at: Optional[datetime.datetime]
    terminated_at: Optional[datetime.datetime]
    endpoint: Optional[str]
    auto_suspend: Optional[Dict[str, Any]]
    cache_config: Optional[int]
    deployment_type: Optional[str]
    resume_attachments: Optional[List[Dict[str, Any]]]
    scaling_progress: Optional[int]
    last_resumed_at: Optional[datetime.datetime]

    def __init__(
        self,
        name: str,
        workspace_id: str,
        workspace_group: Union[str, 'WorkspaceGroup'],
        size: str,
        state: str,
        created_at: Union[str, datetime.datetime],
        terminated_at: Optional[Union[str, datetime.datetime]] = None,
        endpoint: Optional[str] = None,
        auto_suspend: Optional[Dict[str, Any]] = None,
        cache_config: Optional[int] = None,
        deployment_type: Optional[str] = None,
        resume_attachments: Optional[List[Dict[str, Any]]] = None,
        scaling_progress: Optional[int] = None,
        last_resumed_at: Optional[Union[str, datetime.datetime]] = None,
    ):
        #: Name of the workspace
        self.name = name

        #: Unique ID of the workspace
        self.id = workspace_id

        #: Unique ID of the workspace group; accepts either the group
        #: object or its ID string
        if isinstance(workspace_group, WorkspaceGroup):
            self.group_id = workspace_group.id
        else:
            self.group_id = workspace_group

        #: Size of the workspace in workspace size notation (S-00, S-1, etc.)
        self.size = size

        #: State of the workspace: PendingCreation, Transitioning, Active,
        #: Terminated, Suspended, Resuming, Failed
        self.state = state.strip()

        #: Timestamp of when the workspace was created
        self.created_at = to_datetime(created_at)

        #: Timestamp of when the workspace was terminated
        self.terminated_at = to_datetime(terminated_at)

        #: Hostname (or IP address) of the workspace database server
        self.endpoint = endpoint

        #: Current auto-suspend settings; keys are normalized to snake_case
        self.auto_suspend = camel_to_snake_dict(auto_suspend)

        #: Multiplier for the persistent cache
        self.cache_config = cache_config

        #: Deployment type of the workspace
        self.deployment_type = deployment_type

        #: Database attachments; keys are normalized to snake_case
        self.resume_attachments = [
            camel_to_snake_dict(x)  # type: ignore
            for x in resume_attachments or []
            if x is not None
        ]

        #: Current progress percentage for scaling the workspace
        self.scaling_progress = scaling_progress

        #: Timestamp when workspace was last resumed
        self.last_resumed_at = to_datetime(last_resumed_at)

        self._manager: Optional[WorkspaceManager] = None

    def __str__(self) -> str:
        """Return string representation."""
        return vars_to_str(self)

    def __repr__(self) -> str:
        """Return string representation."""
        return str(self)

    @classmethod
    def from_dict(cls, obj: Dict[str, Any], manager: 'WorkspaceManager') -> 'Workspace':
        """
        Construct a Workspace from a dictionary of values.

        Parameters
        ----------
        obj : dict
            Dictionary of values
        manager : WorkspaceManager, optional
            The WorkspaceManager the Workspace belongs to

        Returns
        -------
        :class:`Workspace`

        """
        out = cls(
            name=obj['name'],
            workspace_id=obj['workspaceID'],
            workspace_group=obj['workspaceGroupID'],
            size=obj.get('size', 'Unknown'),
            state=obj['state'],
            created_at=obj['createdAt'],
            terminated_at=obj.get('terminatedAt'),
            endpoint=obj.get('endpoint'),
            auto_suspend=obj.get('autoSuspend'),
            cache_config=obj.get('cacheConfig'),
            deployment_type=obj.get('deploymentType'),
            last_resumed_at=obj.get('lastResumedAt'),
            resume_attachments=obj.get('resumeAttachments'),
            scaling_progress=obj.get('scalingProgress'),
        )
        out._manager = manager
        return out

    def update(
        self,
        auto_suspend: Optional[Dict[str, Any]] = None,
        cache_config: Optional[int] = None,
        deployment_type: Optional[str] = None,
        size: Optional[str] = None,
    ) -> None:
        """
        Update the workspace definition.

        Parameters
        ----------
        auto_suspend : Dict[str, Any], optional
            Auto-suspend settings for the workspace (snake_case keys);
            suspend type values include IDLE, SCHEDULED, DISABLED
        cache_config : int, optional
            Specifies the multiplier for the persistent cache associated
            with the workspace. If specified, it enables the cache configuration
            multiplier. It can have one of the following values: 1, 2, or 4.
        deployment_type : str, optional
            The deployment type that will be applied to all the workspaces
            within the group
        size : str, optional
            Size of the workspace (in workspace size notation), such as "S-1".

        """
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        # Only send keys the caller supplied; the API expects camelCase.
        data = {
            k: v for k, v in dict(
                autoSuspend=snake_to_camel_dict(auto_suspend),
                cacheConfig=cache_config,
                deploymentType=deployment_type,
                size=size,
            ).items() if v is not None
        }
        self._manager._patch(f'workspaces/{self.id}', json=data)
        self.refresh()

    def refresh(self) -> Workspace:
        """Update the object to the current state."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        new_obj = self._manager.get_workspace(self.id)
        for name, value in vars(new_obj).items():
            if isinstance(value, Mapping):
                # Attribute dicts are stored with snake_case keys (see
                # __init__); normalize with camel_to_snake_dict to keep
                # them that way, matching WorkspaceGroup.refresh.
                # (Previously used snake_to_camel_dict, which converted
                # the stored keys back to camelCase on every refresh.)
                setattr(self, name, camel_to_snake_dict(value))
            else:
                setattr(self, name, value)
        return self

    def terminate(
        self,
        wait_on_terminated: bool = False,
        wait_interval: int = 10,
        wait_timeout: int = 600,
        force: bool = False,
    ) -> None:
        """
        Terminate the workspace.

        Parameters
        ----------
        wait_on_terminated : bool, optional
            Wait for the workspace to go into 'Terminated' mode before returning
        wait_interval : int, optional
            Number of seconds between each server check
        wait_timeout : int, optional
            Total number of seconds to check server before giving up
        force : bool, optional
            Should the workspace be terminated even if it has attached resources?

        Raises
        ------
        ManagementError
            If timeout is reached

        """
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        force_str = 'true' if force else 'false'
        self._manager._delete(f'workspaces/{self.id}?force={force_str}')
        if wait_on_terminated:
            self._manager._wait_on_state(
                self._manager.get_workspace(self.id),
                'Terminated', interval=wait_interval, timeout=wait_timeout,
            )
            self.refresh()

    def connect(self, **kwargs: Any) -> connection.Connection:
        """
        Create a connection to the database server for this workspace.

        Parameters
        ----------
        **kwargs : keyword-arguments, optional
            Parameters to the SingleStoreDB `connect` function except host
            and port which are supplied by the workspace object

        Returns
        -------
        :class:`Connection`

        """
        if not self.endpoint:
            raise ManagementError(
                msg='An endpoint has not been set in this workspace configuration',
            )
        kwargs['host'] = self.endpoint
        return connection.connect(**kwargs)

    def suspend(
        self,
        wait_on_suspended: bool = False,
        wait_interval: int = 20,
        wait_timeout: int = 600,
    ) -> None:
        """
        Suspend the workspace.

        Parameters
        ----------
        wait_on_suspended : bool, optional
            Wait for the workspace to go into 'Suspended' mode before returning
        wait_interval : int, optional
            Number of seconds between each server check
        wait_timeout : int, optional
            Total number of seconds to check server before giving up

        Raises
        ------
        ManagementError
            If timeout is reached

        """
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        self._manager._post(f'workspaces/{self.id}/suspend')
        if wait_on_suspended:
            self._manager._wait_on_state(
                self._manager.get_workspace(self.id),
                'Suspended', interval=wait_interval, timeout=wait_timeout,
            )
            self.refresh()

    def resume(
        self,
        disable_auto_suspend: bool = False,
        wait_on_resumed: bool = False,
        wait_interval: int = 20,
        wait_timeout: int = 600,
    ) -> None:
        """
        Resume the workspace.

        Parameters
        ----------
        disable_auto_suspend : bool, optional
            Should auto-suspend be disabled?
        wait_on_resumed : bool, optional
            Wait for the workspace to go into 'Resumed' or 'Active' mode before returning
        wait_interval : int, optional
            Number of seconds between each server check
        wait_timeout : int, optional
            Total number of seconds to check server before giving up

        Raises
        ------
        ManagementError
            If timeout is reached

        """
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        self._manager._post(
            f'workspaces/{self.id}/resume',
            json=dict(disableAutoSuspend=disable_auto_suspend),
        )
        if wait_on_resumed:
            self._manager._wait_on_state(
                self._manager.get_workspace(self.id),
                ['Resumed', 'Active'], interval=wait_interval, timeout=wait_timeout,
            )
            self.refresh()
|
|
986
|
+
|
|
987
|
+
|
|
988
|
+
class WorkspaceGroup(object):
    """
    SingleStoreDB workspace group definition.

    This object is not instantiated directly. It is used in the results
    of API calls on the :class:`WorkspaceManager`. Workspace groups are created using
    :meth:`WorkspaceManager.create_workspace_group`, or existing workspace groups are
    accessed by either :attr:`WorkspaceManager.workspace_groups` or by calling
    :meth:`WorkspaceManager.get_workspace_group`.

    See Also
    --------
    :meth:`WorkspaceManager.create_workspace_group`
    :meth:`WorkspaceManager.get_workspace_group`
    :attr:`WorkspaceManager.workspace_groups`

    """

    name: str
    id: str
    created_at: Optional[datetime.datetime]
    region: Optional[Region]
    firewall_ranges: List[str]
    terminated_at: Optional[datetime.datetime]
    allow_all_traffic: bool

    def __init__(
        self,
        name: str,
        id: str,
        created_at: Union[str, datetime.datetime],
        region: Optional[Region],
        firewall_ranges: List[str],
        terminated_at: Optional[Union[str, datetime.datetime]],
        allow_all_traffic: Optional[bool],
    ):
        #: Name of the workspace group
        self.name = name

        #: Unique ID of the workspace group
        self.id = id

        #: Timestamp of when the workspace group was created
        self.created_at = to_datetime(created_at)

        #: Region of the workspace group (see :class:`Region`)
        self.region = region

        #: List of allowed incoming IP addresses / ranges
        self.firewall_ranges = firewall_ranges

        #: Timestamp of when the workspace group was terminated
        self.terminated_at = to_datetime(terminated_at)

        #: Should all traffic be allowed?
        self.allow_all_traffic = allow_all_traffic or False

        # Set by from_dict(); None means detached from any manager.
        self._manager: Optional[WorkspaceManager] = None

    def __str__(self) -> str:
        """Return string representation."""
        return vars_to_str(self)

    def __repr__(self) -> str:
        """Return string representation."""
        return str(self)

    @classmethod
    def from_dict(
        cls, obj: Dict[str, Any], manager: 'WorkspaceManager',
    ) -> 'WorkspaceGroup':
        """
        Construct a WorkspaceGroup from a dictionary of values.

        Parameters
        ----------
        obj : dict
            Dictionary of values
        manager : WorkspaceManager, optional
            The WorkspaceManager the WorkspaceGroup belongs to

        Returns
        -------
        :class:`WorkspaceGroup`

        """
        # Resolve the region ID against the manager's known regions;
        # fall back to a placeholder Region if the ID is not found.
        try:
            region = [x for x in manager.regions if x.id == obj['regionID']][0]
        except IndexError:
            region = Region('<unknown>', '<unknown>', obj.get('regionID', '<unknown>'))
        out = cls(
            name=obj['name'],
            id=obj['workspaceGroupID'],
            created_at=obj['createdAt'],
            region=region,
            firewall_ranges=obj.get('firewallRanges', []),
            terminated_at=obj.get('terminatedAt'),
            allow_all_traffic=obj.get('allowAllTraffic'),
        )
        out._manager = manager
        return out

    @property
    def organization(self) -> Organization:
        """Return the organization the associated manager belongs to."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        return self._manager.organization

    @property
    def stage(self) -> Stage:
        """Stage manager."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        return Stage(self.id, self._manager)

    # Plural alias for the stage property
    stages = stage

    def refresh(self) -> 'WorkspaceGroup':
        """Update the object to the current state."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        new_obj = self._manager.get_workspace_group(self.id)
        for name, value in vars(new_obj).items():
            if isinstance(value, Mapping):
                # Keep dict-valued attributes in snake_case form
                # (camel_to_snake_dict is a no-op on already-snake keys).
                setattr(self, name, camel_to_snake_dict(value))
            else:
                setattr(self, name, value)
        return self

    def update(
        self,
        name: Optional[str] = None,
        firewall_ranges: Optional[List[str]] = None,
        admin_password: Optional[str] = None,
        expires_at: Optional[str] = None,
        allow_all_traffic: Optional[bool] = None,
        update_window: Optional[Dict[str, int]] = None,
    ) -> None:
        """
        Update the workspace group definition.

        Parameters
        ----------
        name : str, optional
            Name of the workspace group
        firewall_ranges : list[str], optional
            List of allowed CIDR ranges. An empty list indicates that all
            inbound requests are allowed.
        admin_password : str, optional
            Admin password for the workspace group. If no password is supplied,
            a password will be generated and returned in the response.
        expires_at : str, optional
            The timestamp of when the workspace group will expire.
            If the expiration time is not specified,
            the workspace group will have no expiration time.
            At expiration, the workspace group is terminated and all the data is lost.
            Expiration time can be specified as a timestamp or duration.
            Example: "2021-01-02T15:04:05Z07:00", "2021-01-02", "3h30m"
        allow_all_traffic : bool, optional
            Allow all traffic to the workspace group
        update_window : Dict[str, int], optional
            Specify the day and hour of an update window: dict(day=0-6, hour=0-23)

        """
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        # Only send keys the caller supplied; the API expects camelCase.
        data = {
            k: v for k, v in dict(
                name=name,
                firewallRanges=firewall_ranges,
                adminPassword=admin_password,
                expiresAt=expires_at,
                allowAllTraffic=allow_all_traffic,
                updateWindow=snake_to_camel_dict(update_window),
            ).items() if v is not None
        }
        self._manager._patch(f'workspaceGroups/{self.id}', json=data)
        self.refresh()

    def terminate(
        self, force: bool = False,
        wait_on_terminated: bool = False,
        wait_interval: int = 10,
        wait_timeout: int = 600,
    ) -> None:
        """
        Terminate the workspace group.

        Parameters
        ----------
        force : bool, optional
            Terminate a workspace group even if it has active workspaces
        wait_on_terminated : bool, optional
            Wait for the workspace group to go into 'Terminated' mode before returning
        wait_interval : int, optional
            Number of seconds between each server check
        wait_timeout : int, optional
            Total number of seconds to check server before giving up

        Raises
        ------
        ManagementError
            If timeout is reached

        """
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        self._manager._delete(f'workspaceGroups/{self.id}', params=dict(force=force))
        if wait_on_terminated:
            # Poll until the server reports a termination timestamp,
            # decrementing the remaining timeout budget each pass.
            while True:
                self.refresh()
                if self.terminated_at is not None:
                    break
                if wait_timeout <= 0:
                    raise ManagementError(
                        msg='Exceeded waiting time for WorkspaceGroup to terminate',
                    )
                time.sleep(wait_interval)
                wait_timeout -= wait_interval

    def create_workspace(
        self,
        name: str,
        size: Optional[str] = None,
        auto_suspend: Optional[Dict[str, Any]] = None,
        cache_config: Optional[int] = None,
        enable_kai: Optional[bool] = None,
        wait_on_active: bool = False,
        wait_interval: int = 10,
        wait_timeout: int = 600,
    ) -> Workspace:
        """
        Create a new workspace.

        Parameters
        ----------
        name : str
            Name of the workspace
        size : str, optional
            Workspace size in workspace size notation (S-00, S-1, etc.)
        auto_suspend : Dict[str, Any], optional
            Auto suspend settings for the workspace. If this field is not
            provided, no settings will be enabled.
        cache_config : int, optional
            Specifies the multiplier for the persistent cache associated
            with the workspace. If specified, it enables the cache configuration
            multiplier. It can have one of the following values: 1, 2, or 4.
        enable_kai : bool, optional
            Whether to create a SingleStore Kai-enabled workspace
        wait_on_active : bool, optional
            Wait for the workspace to be active before returning
        wait_timeout : int, optional
            Maximum number of seconds to wait before raising an exception
            if wait=True
        wait_interval : int, optional
            Number of seconds between each polling interval

        Returns
        -------
        :class:`Workspace`

        """
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )

        out = self._manager.create_workspace(
            name=name,
            workspace_group=self,
            size=size,
            auto_suspend=snake_to_camel_dict(auto_suspend),
            cache_config=cache_config,
            enable_kai=enable_kai,
            wait_on_active=wait_on_active,
            wait_interval=wait_interval,
            wait_timeout=wait_timeout,
        )

        return out

    @property
    def workspaces(self) -> NamedList[Workspace]:
        """Return a list of available workspaces."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        res = self._manager._get('workspaces', params=dict(workspaceGroupID=self.id))
        return NamedList(
            [Workspace.from_dict(item, self._manager) for item in res.json()],
        )
|
|
1290
|
+
|
|
1291
|
+
|
|
1292
|
+
class StarterWorkspace(object):
    """
    SingleStoreDB starter workspace definition.

    This object is not instantiated directly. It is used in the results
    of API calls on the :class:`WorkspaceManager`. Existing starter workspaces are
    accessed by either :attr:`WorkspaceManager.starter_workspaces` or by calling
    :meth:`WorkspaceManager.get_starter_workspace`.

    See Also
    --------
    :meth:`WorkspaceManager.get_starter_workspace`
    :meth:`WorkspaceManager.create_starter_workspace`
    :meth:`WorkspaceManager.terminate_starter_workspace`
    :meth:`WorkspaceManager.create_starter_workspace_user`
    :attr:`WorkspaceManager.starter_workspaces`

    """

    name: str
    id: str
    database_name: str
    endpoint: Optional[str]

    def __init__(
        self,
        name: str,
        id: str,
        database_name: str,
        endpoint: Optional[str] = None,
    ):
        #: Name of the starter workspace
        self.name = name

        #: Unique ID of the starter workspace
        self.id = id

        #: Name of the database associated with the starter workspace
        self.database_name = database_name

        #: Endpoint to connect to the starter workspace. The endpoint is in the form
        #: of ``hostname:port``
        self.endpoint = endpoint

        self._manager: Optional[WorkspaceManager] = None

    def __str__(self) -> str:
        """Return string representation."""
        return vars_to_str(self)

    def __repr__(self) -> str:
        """Return string representation."""
        return str(self)

    @classmethod
    def from_dict(
        cls, obj: Dict[str, Any], manager: 'WorkspaceManager',
    ) -> 'StarterWorkspace':
        """
        Construct a StarterWorkspace from a dictionary of values.

        Parameters
        ----------
        obj : dict
            Dictionary of values
        manager : WorkspaceManager, optional
            The WorkspaceManager the StarterWorkspace belongs to

        Returns
        -------
        :class:`StarterWorkspace`

        """
        out = cls(
            name=obj['name'],
            id=obj['virtualWorkspaceID'],
            database_name=obj['databaseName'],
            endpoint=obj.get('endpoint'),
        )
        out._manager = manager
        return out

    def connect(self, **kwargs: Any) -> connection.Connection:
        """
        Create a connection to the database server for this starter workspace.

        Parameters
        ----------
        **kwargs : keyword-arguments, optional
            Parameters to the SingleStoreDB `connect` function except host
            and port which are supplied by the starter workspace object

        Returns
        -------
        :class:`Connection`

        """
        if not self.endpoint:
            raise ManagementError(
                msg='An endpoint has not been set in this '
                'starter workspace configuration',
            )

        kwargs['host'] = self.endpoint
        kwargs['database'] = self.database_name

        return connection.connect(**kwargs)

    def terminate(self) -> None:
        """Terminate the starter workspace."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        self._manager._delete(f'sharedtier/virtualWorkspaces/{self.id}')

    def refresh(self) -> StarterWorkspace:
        """Update the object to the current state."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        new_obj = self._manager.get_starter_workspace(self.id)
        for name, value in vars(new_obj).items():
            if isinstance(value, Mapping):
                # Keep dict-valued attributes in snake_case form, matching
                # WorkspaceGroup.refresh. (Previously used
                # snake_to_camel_dict, which would have converted stored
                # snake_case keys to camelCase.)
                setattr(self, name, camel_to_snake_dict(value))
            else:
                setattr(self, name, value)
        return self

    @property
    def organization(self) -> Organization:
        """Return the organization the associated manager belongs to."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        return self._manager.organization

    @property
    def stage(self) -> Stage:
        """Stage manager."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        return Stage(self.id, self._manager)

    # Plural alias for the stage property
    stages = stage

    @property
    def starter_workspaces(self) -> NamedList['StarterWorkspace']:
        """Return a list of available starter workspaces."""
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )
        res = self._manager._get('sharedtier/virtualWorkspaces')
        return NamedList(
            [StarterWorkspace.from_dict(item, self._manager) for item in res.json()],
        )

    def create_user(
        self,
        username: str,
        password: Optional[str] = None,
    ) -> Dict[str, str]:
        """
        Create a new user for this starter workspace.

        Parameters
        ----------
        username : str
            The starter workspace user name to connect the new user to the database
        password : str, optional
            Password for the new user. If not provided, a password will be
            auto-generated by the system.

        Returns
        -------
        Dict[str, str]
            Dictionary containing 'user_id' and 'password' of the created user

        Raises
        ------
        ManagementError
            If no workspace manager is associated with this object.
        """
        if self._manager is None:
            raise ManagementError(
                msg='No workspace manager is associated with this object.',
            )

        payload = {
            'userName': username,
        }
        if password is not None:
            payload['password'] = password

        res = self._manager._post(
            f'sharedtier/virtualWorkspaces/{self.id}/users',
            json=payload,
        )

        response_data = res.json()
        user_id = response_data.get('userID')
        if not user_id:
            raise ManagementError(msg='No userID returned from API')

        # Return the password provided by user or generated by API
        returned_password = password if password is not None \
            else response_data.get('password')
        if not returned_password:
            raise ManagementError(msg='No password available from API response')

        return {
            'user_id': user_id,
            'password': returned_password,
        }
|
|
1510
|
+
|
|
1511
|
+
|
|
1512
|
+
class Billing(object):
    """Accessor for billing usage information via the Management API."""

    # Metric names accepted by the usage endpoint.
    COMPUTE_CREDIT = 'compute_credit'
    STORAGE_AVG_BYTE = 'storage_avg_byte'

    # Aggregation granularities accepted by the usage endpoint.
    HOUR = 'hour'
    DAY = 'day'
    MONTH = 'month'

    def __init__(self, manager: Manager):
        # Manager supplies the authenticated HTTP session (`_get`, `_post`, ...).
        self._manager = manager

    def usage(
        self,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        metric: Optional[str] = None,
        aggregate_by: Optional[str] = None,
    ) -> List[BillingUsageItem]:
        """
        Get usage information.

        Parameters
        ----------
        start_time : datetime.datetime
            Start time for usage interval
        end_time : datetime.datetime
            End time for usage interval
        metric : str, optional
            Possible metrics are ``mgr.billing.COMPUTE_CREDIT`` and
            ``mgr.billing.STORAGE_AVG_BYTE`` (default is all)
        aggregate_by : str, optional
            Aggregate type used to group usage: ``mgr.billing.HOUR``,
            ``mgr.billing.DAY``, or ``mgr.billing.MONTH``

        Returns
        -------
        List[BillingUsageItem]

        """
        res = self._manager._get(
            'billing/usage',
            params={
                # Drop parameters the caller did not supply; the API treats
                # missing parameters as "no filter / no aggregation".
                k: v for k, v in dict(
                    metric=snake_to_camel(metric),
                    startTime=from_datetime(start_time),
                    endTime=from_datetime(end_time),
                    # Management API query parameters are camelCase; this key
                    # was previously sent as snake_case `aggregate_by`, which
                    # the server does not recognize.
                    aggregateBy=aggregate_by.lower() if aggregate_by else None,
                ).items() if v is not None
            },
        )
        return [
            BillingUsageItem.from_dict(x, self._manager)
            for x in res.json()['billingUsage']
        ]
|
|
1568
|
+
|
|
1569
|
+
|
|
1570
|
+
class Organizations(object):
    """Accessor for organizations available through the Management API."""

    def __init__(self, manager: Manager):
        # The manager provides the authenticated HTTP transport.
        self._manager = manager

    @property
    def current(self) -> Organization:
        """Get current organization."""
        payload = self._manager._get('organizations/current').json()
        return Organization.from_dict(payload, self._manager)
|
|
1581
|
+
|
|
1582
|
+
|
|
1583
|
+
class WorkspaceManager(Manager):
    """
    SingleStoreDB workspace manager.

    This class should be instantiated using :func:`singlestoredb.manage_workspaces`.

    Parameters
    ----------
    access_token : str, optional
        The API key or other access token for the workspace management API
    version : str, optional
        Version of the API to use
    base_url : str, optional
        Base URL of the workspace management API

    See Also
    --------
    :func:`singlestoredb.manage_workspaces`

    """

    #: Workspace management API version if none is specified.
    default_version = config.get_option('management.version') or 'v1'

    #: Base URL if none is specified.
    default_base_url = config.get_option('management.base_url') \
        or 'https://api.singlestore.com'

    #: Object type
    obj_type = 'workspace'

    @property
    def workspace_groups(self) -> NamedList[WorkspaceGroup]:
        """Return a list of available workspace groups."""
        res = self._get('workspaceGroups')
        return NamedList([WorkspaceGroup.from_dict(item, self) for item in res.json()])

    @property
    def starter_workspaces(self) -> NamedList[StarterWorkspace]:
        """Return a list of available starter workspaces."""
        res = self._get('sharedtier/virtualWorkspaces')
        return NamedList([StarterWorkspace.from_dict(item, self) for item in res.json()])

    @property
    def organizations(self) -> Organizations:
        """Return the organizations."""
        return Organizations(self)

    @property
    def organization(self) -> Organization:
        """Return the current organization."""
        return self.organizations.current

    @property
    def billing(self) -> Billing:
        """Return the current billing information."""
        return Billing(self)

    # Region lists change rarely; cache them for an hour.
    @ttl_property(datetime.timedelta(hours=1))
    def regions(self) -> NamedList[Region]:
        """Return a list of available regions."""
        res = self._get('regions')
        return NamedList([Region.from_dict(item, self) for item in res.json()])

    @ttl_property(datetime.timedelta(hours=1))
    def shared_tier_regions(self) -> NamedList[Region]:
        """Return a list of regions that support shared tier workspaces."""
        res = self._get('regions/sharedtier')
        return NamedList(
            [Region.from_dict(item, self) for item in res.json()],
        )

    def create_workspace_group(
        self,
        name: str,
        region: Union[str, Region],
        firewall_ranges: List[str],
        admin_password: Optional[str] = None,
        backup_bucket_kms_key_id: Optional[str] = None,
        data_bucket_kms_key_id: Optional[str] = None,
        expires_at: Optional[str] = None,
        smart_dr: Optional[bool] = None,
        allow_all_traffic: Optional[bool] = None,
        update_window: Optional[Dict[str, int]] = None,
    ) -> WorkspaceGroup:
        """
        Create a new workspace group.

        Parameters
        ----------
        name : str
            Name of the workspace group
        region : str or Region
            ID of the region where the workspace group should be created
        firewall_ranges : list[str]
            List of allowed CIDR ranges. An empty list indicates that all
            inbound requests are allowed.
        admin_password : str, optional
            Admin password for the workspace group. If no password is supplied,
            a password will be generated and returned in the response.
        backup_bucket_kms_key_id : str, optional
            Specifies the KMS key ID associated with the backup bucket.
            If specified, enables Customer-Managed Encryption Keys (CMEK)
            encryption for the backup bucket of the workspace group.
            This feature is only supported in workspace groups deployed in AWS.
        data_bucket_kms_key_id : str, optional
            Specifies the KMS key ID associated with the data bucket.
            If specified, enables Customer-Managed Encryption Keys (CMEK)
            encryption for the data bucket and Amazon Elastic Block Store
            (EBS) volumes of the workspace group. This feature is only supported
            in workspace groups deployed in AWS.
        expires_at : str, optional
            The timestamp of when the workspace group will expire.
            If the expiration time is not specified,
            the workspace group will have no expiration time.
            At expiration, the workspace group is terminated and all the data is lost.
            Expiration time can be specified as a timestamp or duration.
            Example: "2021-01-02T15:04:05Z07:00", "2021-01-02", "3h30m"
        smart_dr : bool, optional
            Enables Smart Disaster Recovery (SmartDR) for the workspace group.
            SmartDR is a disaster recovery solution that ensures seamless and
            continuous replication of data from the primary region to a secondary region
        allow_all_traffic : bool, optional
            Allow all traffic to the workspace group
        update_window : Dict[str, int], optional
            Specify the day and hour of an update window: dict(day=0-6, hour=0-23)

        Returns
        -------
        :class:`WorkspaceGroup`

        """
        # Accept either a Region object or a bare region ID.
        if isinstance(region, Region) and region.id:
            region = region.id
        res = self._post(
            'workspaceGroups', json=dict(
                name=name, regionID=region,
                adminPassword=admin_password,
                backupBucketKMSKeyID=backup_bucket_kms_key_id,
                dataBucketKMSKeyID=data_bucket_kms_key_id,
                firewallRanges=firewall_ranges or [],
                expiresAt=expires_at,
                smartDR=smart_dr,
                allowAllTraffic=allow_all_traffic,
                updateWindow=snake_to_camel_dict(update_window),
            ),
        )
        # Re-fetch so the returned object reflects the server's view.
        return self.get_workspace_group(res.json()['workspaceGroupID'])

    def create_workspace(
        self,
        name: str,
        workspace_group: Union[str, WorkspaceGroup],
        size: Optional[str] = None,
        auto_suspend: Optional[Dict[str, Any]] = None,
        cache_config: Optional[int] = None,
        enable_kai: Optional[bool] = None,
        wait_on_active: bool = False,
        wait_interval: int = 10,
        wait_timeout: int = 600,
    ) -> Workspace:
        """
        Create a new workspace.

        Parameters
        ----------
        name : str
            Name of the workspace
        workspace_group : str or WorkspaceGroup
            ID of the workspace group (or the group object itself) that
            the new workspace is created in
        size : str, optional
            Workspace size in workspace size notation (S-00, S-1, etc.)
        auto_suspend : Dict[str, Any], optional
            Auto suspend settings for the workspace. If this field is not
            provided, no settings will be enabled.
        cache_config : int, optional
            Specifies the multiplier for the persistent cache associated
            with the workspace. If specified, it enables the cache configuration
            multiplier. It can have one of the following values: 1, 2, or 4.
        enable_kai : bool, optional
            Whether to create a SingleStore Kai-enabled workspace
        wait_on_active : bool, optional
            Wait for the workspace to be active before returning
        wait_timeout : int, optional
            Maximum number of seconds to wait before raising an exception
            if wait_on_active=True
        wait_interval : int, optional
            Number of seconds between each polling interval

        Returns
        -------
        :class:`Workspace`

        """
        if isinstance(workspace_group, WorkspaceGroup):
            workspace_group = workspace_group.id
        res = self._post(
            'workspaces', json=dict(
                name=name,
                workspaceGroupID=workspace_group,
                size=size,
                autoSuspend=snake_to_camel_dict(auto_suspend),
                cacheConfig=cache_config,
                enableKai=enable_kai,
            ),
        )
        out = self.get_workspace(res.json()['workspaceID'])
        if wait_on_active:
            out = self._wait_on_state(
                out,
                'Active',
                interval=wait_interval,
                timeout=wait_timeout,
            )
            # After workspace is active, wait for endpoint to be ready
            out = self._wait_on_endpoint(
                out,
                interval=wait_interval,
                timeout=wait_timeout,
            )
        return out

    def get_workspace_group(self, id: str) -> WorkspaceGroup:
        """
        Retrieve a workspace group definition.

        Parameters
        ----------
        id : str
            ID of the workspace group

        Returns
        -------
        :class:`WorkspaceGroup`

        """
        res = self._get(f'workspaceGroups/{id}')
        return WorkspaceGroup.from_dict(res.json(), manager=self)

    def get_workspace(self, id: str) -> Workspace:
        """
        Retrieve a workspace definition.

        Parameters
        ----------
        id : str
            ID of the workspace

        Returns
        -------
        :class:`Workspace`

        """
        res = self._get(f'workspaces/{id}')
        return Workspace.from_dict(res.json(), manager=self)

    def get_starter_workspace(self, id: str) -> StarterWorkspace:
        """
        Retrieve a starter workspace definition.

        Parameters
        ----------
        id : str
            ID of the starter workspace

        Returns
        -------
        :class:`StarterWorkspace`

        """
        res = self._get(f'sharedtier/virtualWorkspaces/{id}')
        return StarterWorkspace.from_dict(res.json(), manager=self)

    def create_starter_workspace(
        self,
        name: str,
        database_name: str,
        provider: str,
        region_name: str,
    ) -> 'StarterWorkspace':
        """
        Create a new starter (shared tier) workspace.

        Parameters
        ----------
        name : str
            Name of the starter workspace
        database_name : str
            Name of the database for the starter workspace
        provider : str
            Cloud provider for the starter workspace (e.g., 'aws', 'gcp', 'azure')
        region_name : str
            Cloud provider region for the starter workspace (e.g., 'us-east-1')

        Returns
        -------
        :class:`StarterWorkspace`

        Raises
        ------
        ManagementError
            If the API response does not include a virtual workspace ID

        """
        payload = {
            'name': name,
            'databaseName': database_name,
            'provider': provider,
            'regionName': region_name,
        }

        res = self._post('sharedtier/virtualWorkspaces', json=payload)
        virtual_workspace_id = res.json().get('virtualWorkspaceID')
        if not virtual_workspace_id:
            raise ManagementError(msg='No virtualWorkspaceID returned from API')

        # Re-fetch so the returned object reflects the server's view.
        res = self._get(f'sharedtier/virtualWorkspaces/{virtual_workspace_id}')
        return StarterWorkspace.from_dict(res.json(), self)
|
|
1896
|
+
|
|
1897
|
+
|
|
1898
|
+
def manage_workspaces(
    access_token: Optional[str] = None,
    version: Optional[str] = None,
    base_url: Optional[str] = None,
    *,
    organization_id: Optional[str] = None,
) -> WorkspaceManager:
    """
    Retrieve a SingleStoreDB workspace manager.

    Parameters
    ----------
    access_token : str, optional
        The API key or other access token for the workspace management API
    version : str, optional
        Version of the API to use
    base_url : str, optional
        Base URL of the workspace management API
    organization_id : str, optional
        ID of organization, if using a JWT for authentication

    Returns
    -------
    :class:`WorkspaceManager`

    """
    # Unspecified options fall back to WorkspaceManager's own defaults.
    manager_options = dict(
        access_token=access_token,
        base_url=base_url,
        version=version,
        organization_id=organization_id,
    )
    return WorkspaceManager(**manager_options)
|