singlestoredb 1.7.2__py3-none-any.whl → 1.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of singlestoredb might be problematic.
- singlestoredb/__init__.py +2 -2
- singlestoredb/config.py +6 -0
- singlestoredb/connection.py +11 -0
- singlestoredb/fusion/handler.py +71 -18
- singlestoredb/fusion/handlers/export.py +237 -0
- singlestoredb/fusion/handlers/files.py +690 -0
- singlestoredb/fusion/handlers/job.py +4 -20
- singlestoredb/fusion/handlers/stage.py +103 -91
- singlestoredb/fusion/handlers/utils.py +148 -0
- singlestoredb/management/__init__.py +1 -0
- singlestoredb/management/export.py +146 -0
- singlestoredb/management/files.py +1038 -0
- singlestoredb/management/workspace.py +162 -349
- singlestoredb/mysql/connection.py +9 -1
- singlestoredb/py.typed +0 -0
- singlestoredb/tests/test.ipynb +18 -0
- singlestoredb/tests/test2.ipynb +18 -0
- singlestoredb/tests/test_fusion.py +0 -4
- singlestoredb/tests/test_management.py +273 -1
- {singlestoredb-1.7.2.dist-info → singlestoredb-1.9.0.dist-info}/METADATA +1 -1
- {singlestoredb-1.7.2.dist-info → singlestoredb-1.9.0.dist-info}/RECORD +27 -18
- {singlestoredb-1.7.2.dist-info → singlestoredb-1.9.0.dist-info}/WHEEL +1 -1
- {singlestoredb-1.7.2.dist-info → singlestoredb-1.9.0.dist-info}/top_level.txt +1 -0
- sqlx/__init__.py +4 -0
- sqlx/magic.py +113 -0
- {singlestoredb-1.7.2.dist-info → singlestoredb-1.9.0.dist-info}/LICENSE +0 -0
- {singlestoredb-1.7.2.dist-info → singlestoredb-1.9.0.dist-info}/entry_points.txt +0 -0
singlestoredb/__init__.py
CHANGED
@@ -13,7 +13,7 @@ Examples
 
 """
 
-__version__ = '1.7.2'
+__version__ = '1.9.0'
 
 from typing import Any
 
@@ -25,7 +25,7 @@ from .exceptions import (
     DataError, ManagementError,
 )
 from .management import (
-    manage_cluster, manage_workspaces,
+    manage_cluster, manage_workspaces, manage_files,
 )
 from .types import (
     Date, Time, Timestamp, DateFromTicks, TimeFromTicks, TimestampFromTicks,
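The new `manage_files` export mirrors the existing `manage_cluster` and `manage_workspaces` entry points. A minimal usage sketch, assuming it takes a management API key the same way the other managers do (the keyword name is not confirmed by this diff):

    # Sketch only: manage_files() is newly exported in 1.9.0; the access_token
    # keyword is assumed by analogy with manage_workspaces(), not shown in this diff.
    import singlestoredb as s2

    files_mgr = s2.manage_files(access_token='<management-api-key>')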
singlestoredb/config.py
CHANGED
@@ -134,6 +134,12 @@ register_option(
     environ='SINGLESTOREDB_SSL_CIPHER',
 )
 
+register_option(
+    'tls_sni_servername', 'str', check_str, None,
+    'Sets TLS SNI servername',
+    environ='SINGLESTOREDB_TLS_SNI_SERVERNAME',
+)
+
 register_option(
     'ssl_disabled', 'bool', check_bool, False,
     'Disable SSL usage',
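The new option registers a `SINGLESTOREDB_TLS_SNI_SERVERNAME` environment variable and, per the `connection.py` hunk below, a matching `tls_sni_servername` keyword on `connect()`. A minimal sketch, with host names and credentials as placeholders:

    import os
    import singlestoredb as s2

    # Either set the documented environment variable...
    os.environ['SINGLESTOREDB_TLS_SNI_SERVERNAME'] = 'db.internal.example.com'

    # ...or pass the new keyword directly (host and credentials are placeholders).
    conn = s2.connect(
        host='10.0.0.12', user='admin', password='secret',
        tls_sni_servername='db.internal.example.com',
    )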
singlestoredb/connection.py
CHANGED
@@ -20,6 +20,7 @@ from typing import Sequence
 from typing import Tuple
 from typing import Union
 from urllib.parse import parse_qs
+from urllib.parse import unquote_plus
 from urllib.parse import urlparse
 
 import sqlparams
@@ -284,6 +285,15 @@ def _parse_url(url: str) -> Dict[str, Any]:
     if parts.scheme != 'singlestoredb':
         out['driver'] = parts.scheme.lower()
 
+    if out.get('user'):
+        out['user'] = unquote_plus(out['user'])
+
+    if out.get('password'):
+        out['password'] = unquote_plus(out['password'])
+
+    if out.get('database'):
+        out['database'] = unquote_plus(out['database'])
+
     # Convert query string to parameters
     out.update({k.lower(): v[-1] for k, v in parse_qs(parts.query).items()})
 
@@ -1288,6 +1298,7 @@ def connect(
     ssl_key: Optional[str] = None, ssl_cert: Optional[str] = None,
     ssl_ca: Optional[str] = None, ssl_disabled: Optional[bool] = None,
     ssl_cipher: Optional[str] = None, ssl_verify_cert: Optional[bool] = None,
+    tls_sni_servername: Optional[str] = None,
     ssl_verify_identity: Optional[bool] = None,
     conv: Optional[Dict[int, Callable[..., Any]]] = None,
     credential_type: Optional[str] = None,
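With `unquote_plus` applied to the user, password, and database parts of a connection URL, percent-encoded characters in those fields are now decoded before use. An illustrative sketch (the URL and credentials are made up):

    import singlestoredb as s2

    # '%40' decodes to '@' and '%23' decodes to '#', so reserved characters in
    # credentials can now be percent-encoded in the URL.  Values are illustrative.
    conn = s2.connect('singlestoredb://app%40corp:p%23ssword@dbhost:3306/analytics')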
singlestoredb/fusion/handler.py
CHANGED
@@ -11,6 +11,7 @@ from typing import Dict
 from typing import Iterable
 from typing import List
 from typing import Optional
+from typing import Set
 from typing import Tuple
 
 from parsimonious import Grammar
@@ -23,9 +24,9 @@ from ..connection import Connection
 
 CORE_GRAMMAR = r'''
     ws = ~r"(\s+|(\s*/\*.*\*/\s*)+)"
-    qs = ~r"\"([^\"]*)\"|'([^\']*)'
-    number = ~r"[-+]?(\d*\.)?\d+(e[-+]?\d+)?"i
-    integer = ~r"-?\d+"
+    qs = ~r"\"([^\"]*)\"|'([^\']*)'|([A-Za-z0-9_\-\.]+)|`([^\`]+)`" ws*
+    number = ~r"[-+]?(\d*\.)?\d+(e[-+]?\d+)?"i ws*
+    integer = ~r"-?\d+" ws*
     comma = ws* "," ws*
     eq = ws* "=" ws*
     open_paren = ws* "(" ws*
@@ -33,6 +34,10 @@ CORE_GRAMMAR = r'''
    open_repeats = ws* ~r"[\(\[\{]" ws*
    close_repeats = ws* ~r"[\)\]\}]" ws*
    select = ~r"SELECT"i ws+ ~r".+" ws*
+    table = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
+    column = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
+    link_name = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
+    catalog_name = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
 
     json = ws* json_object ws*
     json_object = ~r"{\s*" json_members? ~r"\s*}"
@@ -65,6 +70,13 @@ BUILTINS = {
     '<integer>': '',
     '<number>': '',
     '<json>': '',
+    '<table>': '',
+    '<column>': '',
+    '<catalog-name>': '',
+    '<link-name>': '',
+    '<file-type>': r'''
+        file_type = { FILE | FOLDER }
+    ''',
 }
 
 BUILTIN_DEFAULTS = {  # type: ignore
@@ -226,9 +238,13 @@ def build_syntax(grammar: str) -> str:
     # Split on ';' on a line by itself
     cmd, end = grammar.split(';', 1)
 
-
+    name = ''
+    rules: Dict[str, Any] = {}
     for line in end.split('\n'):
         line = line.strip()
+        if line.startswith('&'):
+            rules[name] += '\n' + line
+            continue
         if not line:
             continue
         name, value = line.split('=', 1)
@@ -239,10 +255,16 @@ def build_syntax(grammar: str) -> str:
     while re.search(r' [a-z0-9_]+\b', cmd):
         cmd = re.sub(r' ([a-z0-9_]+)\b', functools.partial(expand_rules, rules), cmd)
 
+    def add_indent(m: Any) -> str:
+        return ' ' + (len(m.group(1)) * ' ')
+
+    # Indent line-continuations
+    cmd = re.sub(r'^(\&+)\s*', add_indent, cmd, flags=re.M)
+
     cmd = textwrap.dedent(cmd).rstrip() + ';'
-    cmd = re.sub(r' +', ' ', cmd)
-    cmd = re.sub(r'
-    cmd = re.sub(r'\s
+    cmd = re.sub(r'(\S) +', r'\1 ', cmd)
+    cmd = re.sub(r'<comma>', ',', cmd)
+    cmd = re.sub(r'\s+,\s*\.\.\.', ',...', cmd)
 
     return cmd
 
@@ -399,9 +421,15 @@ process_grammar(
     help_txt = build_help(syntax_txt, full_grammar)
     grammar = build_cmd(grammar)
 
+    # Remove line-continuations
+    grammar = re.sub(r'\n\s*&+', r'', grammar)
+
     # Make sure grouping characters all have whitespace around them
     grammar = re.sub(r' *(\[|\{|\||\}|\]) *', r' \1 ', grammar)
 
+    grammar = re.sub(r'\(', r' open_paren ', grammar)
+    grammar = re.sub(r'\)', r' close_paren ', grammar)
+
     for line in grammar.split('\n'):
         if not line.strip():
             continue
@@ -418,7 +446,7 @@
     sql = re.sub(r'\]\s+\[', r' | ', sql)
 
     # Lower-case keywords and make them case-insensitive
-    sql = re.sub(r'(\b|@+)([A-Z0-
+    sql = re.sub(r'(\b|@+)([A-Z0-9_]+)\b', lower_and_regex, sql)
 
     # Convert literal strings to 'qs'
     sql = re.sub(r"'[^']+'", r'qs', sql)
@@ -461,12 +489,18 @@
     sql = re.sub(r'\s+ws$', r' ws*', sql)
     sql = re.sub(r'\s+ws\s+\(', r' ws* (', sql)
     sql = re.sub(r'\)\s+ws\s+', r') ws* ', sql)
-    sql = re.sub(r'\s+ws\s+', r' ws
+    sql = re.sub(r'\s+ws\s+', r' ws* ', sql)
     sql = re.sub(r'\?\s+ws\+', r'? ws*', sql)
 
     # Remove extra ws around eq
     sql = re.sub(r'ws\+\s*eq\b', r'eq', sql)
 
+    # Remove optional groupings when mandatory groupings are specified
+    sql = re.sub(r'open_paren\s+ws\*\s+open_repeats\?', r'open_paren', sql)
+    sql = re.sub(r'close_repeats\?\s+ws\*\s+close_paren', r'close_paren', sql)
+    sql = re.sub(r'open_paren\s+open_repeats\?', r'open_paren', sql)
+    sql = re.sub(r'close_repeats\?\s+close_paren', r'close_paren', sql)
+
     out.append(f'{op} = {sql}')
 
 for k, v in list(rules.items()):
@@ -548,6 +582,7 @@ class SQLHandler(NodeVisitor):
 
     def __init__(self, connection: Connection):
         self.connection = connection
+        self._handled: Set[str] = set()
 
     @classmethod
     def compile(cls, grammar: str = '') -> None:
@@ -614,12 +649,16 @@
         )
 
         type(self).compile()
+        self._handled = set()
         try:
             params = self.visit(type(self).grammar.parse(sql))
             for k, v in params.items():
                 params[k] = self.validate_rule(k, v)
 
             res = self.run(params)
+
+            self._handled = set()
+
             if res is not None:
                 res.format_results(self.connection)
             return res
@@ -666,16 +705,20 @@
         """Quoted strings."""
         if node is None:
             return None
-        return
-
+        return flatten(visited_children)[0]
+
+    def visit_compound(self, node: Node, visited_children: Iterable[Any]) -> Any:
+        """Compound name."""
+        print(visited_children)
+        return flatten(visited_children)[0]
 
     def visit_number(self, node: Node, visited_children: Iterable[Any]) -> Any:
         """Numeric value."""
-        return float(
+        return float(flatten(visited_children)[0])
 
     def visit_integer(self, node: Node, visited_children: Iterable[Any]) -> Any:
         """Integer value."""
-        return int(
+        return int(flatten(visited_children)[0])
 
     def visit_ws(self, node: Node, visited_children: Iterable[Any]) -> Any:
         """Whitespace and comments."""
@@ -804,19 +847,29 @@
         if node.expr_name.endswith('_cmd'):
             final = merge_dicts(flatten(visited_children)[n_keywords:])
             for k, v in type(self).rule_info.items():
-                if k.endswith('_cmd') or k.endswith('_'):
+                if k.endswith('_cmd') or k.endswith('_') or k.startswith('_'):
                     continue
-                if k not in final:
+                if k not in final and k not in self._handled:
                     final[k] = BUILTIN_DEFAULTS.get(k, v['default'])
             return final
 
         # Filter out stray empty strings
         out = [x for x in flatten(visited_children)[n_keywords:] if x]
 
-
-
+        # Remove underscore prefixes from rule name
+        key_name = re.sub(r'^_+', r'', node.expr_name)
 
-
+        if repeats or len(out) > 1:
+            self._handled.add(node.expr_name)
+            # If all outputs are dicts, merge them
+            if len(out) > 1 and not repeats:
+                is_dicts = [x for x in out if isinstance(x, dict)]
+                if len(is_dicts) == len(out):
+                    return {key_name: merge_dicts(out)}
+            return {key_name: out}
+
+        self._handled.add(node.expr_name)
+        return {key_name: out[0] if out else True}
 
         if hasattr(node, 'match'):
             if not visited_children and not node.match.groups():
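Taken together, the `build_syntax` and `process_grammar` changes add a line-continuation convention for handler grammars: a line beginning with `&` is appended to the rule defined above it, rendered as an indented continuation in the generated syntax help, and stripped back out before the PEG grammar is compiled. A hypothetical grammar fragment illustrating the convention (the command and rule names are invented, not taken from the package):

    # Hypothetical grammar fragment; only the '&' continuation syntax is the point.
    # In 1.9.0, build_syntax() indents the '&' line in the help text and
    # process_grammar() joins it back onto the in_path rule before parsing.
    DEMO_GRAMMAR = '''
    SHOW DEMO FILES in_path [ extended ];

    # Path to list, continued onto a second line with '&'
    in_path = IN PATH '<path>'
    & [ RECURSIVE ]

    # Show extended attributes
    extended = EXTENDED
    '''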
singlestoredb/fusion/handlers/export.py
ADDED
@@ -0,0 +1,237 @@
+#!/usr/bin/env python3
+import json
+from typing import Any
+from typing import Dict
+from typing import Optional
+
+from .. import result
+from ...management.export import ExportService
+from ...management.export import ExportStatus
+from ..handler import SQLHandler
+from ..result import FusionSQLResult
+from .utils import get_workspace_group
+
+
+class CreateClusterIdentity(SQLHandler):
+    """
+    CREATE CLUSTER IDENTITY
+        catalog
+        storage
+    ;
+
+    # Catolog
+    catalog = CATALOG { _catalog_config | _catalog_creds }
+    _catalog_config = CONFIG '<catalog-config>'
+    _catalog_creds = CREDENTIALS '<catalog-creds>'
+
+    # Storage
+    storage = LINK { _link_config | _link_creds }
+    _link_config = S3 CONFIG '<link-config>'
+    _link_creds = CREDENTIALS '<link-creds>'
+
+    Description
+    -----------
+    Create a cluster identity for allowing the export service to access
+    external cloud resources.
+
+    Arguments
+    ---------
+    * ``<catalog-config>`` and ``<catalog-creds>``: Catalog configuration
+      and credentials in JSON format.
+    * ``<link-config>`` and ``<link-creds>``: Storage link configuration
+      and credentials in JSON format.
+
+    Remarks
+    -------
+    * ``FROM <table>`` specifies the SingleStore table to export. The same name will
+      be used for the exported table.
+    * ``CATALOG`` specifies the details of the catalog to connect to.
+    * ``LINK`` specifies the details of the data storage to connect to.
+
+    Example
+    -------
+    The following statement creates a cluster identity for the catalog
+    and link::
+
+        CREATE CLUSTER IDENTITY
+            CATALOG CONFIG '{
+                "type": "GLUE",
+                "table_format": "ICEBERG",
+                "id": "13983498723498",
+                "region": "us-east-1"
+            }'
+            LINK S3 CONFIG '{
+                "region": "us-east-1",
+                "endpoint_url": "s3://bucket-name"
+
+            }'
+        ;
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        # Catalog
+        catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
+        catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
+
+        # Storage
+        storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
+        storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+
+        wsg = get_workspace_group({})
+
+        if wsg._manager is None:
+            raise TypeError('no workspace manager is associated with workspace group')
+
+        out = ExportService(
+            wsg,
+            'none',
+            'none',
+            dict(**catalog_config, **catalog_creds),
+            dict(**storage_config, **storage_creds),
+            columns=None,
+        ).create_cluster_identity()
+
+        res = FusionSQLResult()
+        res.add_field('Identity', result.STRING)
+        res.set_rows([(out['identity'],)])
+
+        return res
+
+
+CreateClusterIdentity.register(overwrite=True)
+
+
+class CreateExport(SQLHandler):
+    """
+    START EXPORT
+        from_table
+        catalog
+        storage
+    ;
+
+    # From table
+    from_table = FROM <table>
+
+    # Catolog
+    catalog = CATALOG [ _catalog_config ] [ _catalog_creds ]
+    _catalog_config = CONFIG '<catalog-config>'
+    _catalog_creds = CREDENTIALS '<catalog-creds>'
+
+    # Storage
+    storage = LINK [ _link_config ] [ _link_creds ]
+    _link_config = S3 CONFIG '<link-config>'
+    _link_creds = CREDENTIALS '<link-creds>'
+
+    Description
+    -----------
+    Create an export configuration.
+
+    Arguments
+    ---------
+    * ``<catalog-config>`` and ``<catalog-creds>``: The catalog configuration.
+    * ``<link-config>`` and ``<link-creds>``: The storage link configuration.
+
+    Remarks
+    -------
+    * ``FROM <table>`` specifies the SingleStore table to export. The same name will
+      be used for the exported table.
+    * ``CATALOG`` specifies the details of the catalog to connect to.
+    * ``LINK`` specifies the details of the data storage to connect to.
+
+    Examples
+    --------
+    The following statement starts an export operation with the given
+    catalog and link configurations. The source table to export is
+    named "customer_data"::
+
+        START EXPORT FROM customer_data
+            CATALOG CONFIG '{
+                "type": "GLUE",
+                "table_format": "ICEBERG",
+                "id": "13983498723498",
+                "region": "us-east-1"
+            }'
+            LINK S3 CONFIG '{
+                "region": "us-east-1",
+                "endpoint_url": "s3://bucket-name"
+
+            }'
+        ;
+
+    """ # noqa
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        # From table
+        if isinstance(params['from_table'], str):
+            from_database = None
+            from_table = params['from_table']
+        else:
+            from_database, from_table = params['from_table']
+
+        # Catalog
+        catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
+        catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
+
+        # Storage
+        storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
+        storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+
+        wsg = get_workspace_group({})
+
+        if from_database is None:
+            raise ValueError('database name must be specified for source table')
+
+        if wsg._manager is None:
+            raise TypeError('no workspace manager is associated with workspace group')
+
+        out = ExportService(
+            wsg,
+            from_database,
+            from_table,
+            dict(**catalog_config, **catalog_creds),
+            dict(**storage_config, **storage_creds),
+            columns=None,
+        ).start()
+
+        res = FusionSQLResult()
+        res.add_field('ExportID', result.STRING)
+        res.set_rows([(out.export_id,)])
+
+        return res
+
+
+CreateExport.register(overwrite=True)
+
+
+class ShowExport(SQLHandler):
+    """
+    SHOW EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        out = ExportStatus(params['export_id'], wsg)
+
+        status = out._info()
+
+        res = FusionSQLResult()
+        res.add_field('ExportID', result.STRING)
+        res.add_field('Status', result.STRING)
+        res.add_field('Message', result.STRING)
+        res.set_rows([
+            (
+                params['export_id'],
+                status.get('status', 'Unknown'),
+                status.get('statusMsg', ''),
+            ),
+        ])
+
+        return res
+
+
+ShowExport.register(overwrite=True)
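Because these handlers register themselves as Fusion SQL commands, they are issued through an ordinary cursor once Fusion handlers are enabled. A rough usage sketch based only on the docstrings above (connection details, table name, and JSON payloads are placeholders; how Fusion SQL is enabled in your deployment is an assumption, not shown in this diff):

    import singlestoredb as s2

    # Placeholders throughout; enabling Fusion SQL handlers is assumed to be
    # controlled by an option or environment setting in your deployment.
    with s2.connect('singlestoredb://user:pw@host:3306/analytics') as conn:
        with conn.cursor() as cur:
            # Start an export (syntax from the CreateExport docstring above).
            cur.execute("""
                START EXPORT FROM analytics.customer_data
                    CATALOG CONFIG '{"type": "GLUE", "table_format": "ICEBERG",
                                     "id": "13983498723498", "region": "us-east-1"}'
                    LINK S3 CONFIG '{"region": "us-east-1",
                                     "endpoint_url": "s3://bucket-name"}'
            """)
            export_id = cur.fetchall()[0][0]

            # Poll the export (syntax from the ShowExport docstring above).
            cur.execute(f"SHOW EXPORT '{export_id}'")
            print(cur.fetchall())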