singlestoredb 1.0.4__py3-none-any.whl → 1.2.0__py3-none-any.whl
- singlestoredb/__init__.py +1 -1
- singlestoredb/config.py +131 -0
- singlestoredb/connection.py +3 -0
- singlestoredb/converters.py +390 -0
- singlestoredb/functions/dtypes.py +5 -198
- singlestoredb/functions/ext/__init__.py +0 -1
- singlestoredb/functions/ext/asgi.py +671 -153
- singlestoredb/functions/ext/json.py +2 -2
- singlestoredb/functions/ext/mmap.py +174 -67
- singlestoredb/functions/ext/rowdat_1.py +2 -2
- singlestoredb/functions/ext/utils.py +169 -0
- singlestoredb/fusion/handler.py +115 -9
- singlestoredb/fusion/handlers/stage.py +246 -13
- singlestoredb/fusion/handlers/workspace.py +417 -14
- singlestoredb/fusion/registry.py +86 -1
- singlestoredb/http/connection.py +40 -2
- singlestoredb/management/__init__.py +1 -0
- singlestoredb/management/organization.py +4 -0
- singlestoredb/management/utils.py +2 -2
- singlestoredb/management/workspace.py +79 -6
- singlestoredb/mysql/connection.py +81 -0
- singlestoredb/mysql/constants/EXTENDED_TYPE.py +3 -0
- singlestoredb/mysql/constants/FIELD_TYPE.py +16 -0
- singlestoredb/mysql/constants/VECTOR_TYPE.py +6 -0
- singlestoredb/mysql/cursors.py +177 -4
- singlestoredb/mysql/protocol.py +50 -1
- singlestoredb/notebook/__init__.py +15 -0
- singlestoredb/notebook/_objects.py +212 -0
- singlestoredb/tests/test.sql +259 -0
- singlestoredb/tests/test_connection.py +1715 -133
- singlestoredb/tests/test_ext_func.py +2 -2
- singlestoredb/tests/test_ext_func_data.py +1 -1
- singlestoredb/utils/dtypes.py +205 -0
- singlestoredb/utils/results.py +367 -14
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/METADATA +2 -1
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/RECORD +40 -34
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/LICENSE +0 -0
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/WHEEL +0 -0
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/top_level.txt +0 -0
singlestoredb/mysql/cursors.py
CHANGED
@@ -4,7 +4,9 @@ from collections import namedtuple

 from . import err
 from ..connection import Cursor as BaseCursor
+from ..utils import results
 from ..utils.debug import log_query
+from ..utils.results import get_schema


 #: Regular expression for :meth:`Cursor.executemany`.
@@ -45,6 +47,7 @@ class Cursor(BaseCursor):
         self._connection = connection
         self.warning_count = 0
         self._description = None
+        self._format_schema = None
         self._rownumber = 0
         self.rowcount = -1
         self.arraysize = 1
@@ -62,6 +65,10 @@ class Cursor(BaseCursor):
     def description(self):
         return self._description

+    @property
+    def _schema(self):
+        return self._format_schema
+
     @property
     def connection(self):
         return self._connection
@@ -394,6 +401,7 @@ class Cursor(BaseCursor):
         self.rowcount = 0
         self.warning_count = 0
         self._description = None
+        self._format_schema = None
         self.lastrowid = None
         self._rows = None

@@ -409,6 +417,11 @@
         if self.rowcount == 18446744073709551615:
             self.rowcount = -1
         self._description = result.description
+        if self._description:
+            self._format_schema = get_schema(
+                self.connection._results_type,
+                result.description,
+            )
         self.lastrowid = result.insert_id
         self._rows = result.rows

@@ -440,6 +453,134 @@ class CursorSV(Cursor):
     """Cursor class for C extension."""


+class ArrowCursorMixin:
+    """Fetch methods for Arrow Tables."""
+
+    def fetchone(self):
+        return results.results_to_arrow(
+            self.description, super().fetchone(), single=True, schema=self._schema,
+        )
+
+    def fetchall(self):
+        return results.results_to_arrow(
+            self.description, super().fetchall(), schema=self._schema,
+        )
+
+    def fetchall_unbuffered(self):
+        return results.results_to_arrow(
+            self.description, super().fetchall_unbuffered(), schema=self._schema,
+        )
+
+    def fetchmany(self, size=None):
+        return results.results_to_arrow(
+            self.description, super().fetchmany(size), schema=self._schema,
+        )
+
+
+class ArrowCursor(ArrowCursorMixin, Cursor):
+    """A cursor which returns results as an Arrow Table."""
+
+
+class ArrowCursorSV(ArrowCursorMixin, CursorSV):
+    """A cursor which returns results as an Arrow Table for C extension."""
+
+
+class NumpyCursorMixin:
+    """Fetch methods for numpy arrays."""
+
+    def fetchone(self):
+        return results.results_to_numpy(
+            self.description, super().fetchone(), single=True, schema=self._schema,
+        )
+
+    def fetchall(self):
+        return results.results_to_numpy(
+            self.description, super().fetchall(), schema=self._schema,
+        )
+
+    def fetchall_unbuffered(self):
+        return results.results_to_numpy(
+            self.description, super().fetchall_unbuffered(), schema=self._schema,
+        )
+
+    def fetchmany(self, size=None):
+        return results.results_to_numpy(
+            self.description, super().fetchmany(size), schema=self._schema,
+        )
+
+
+class NumpyCursor(NumpyCursorMixin, Cursor):
+    """A cursor which returns results as a numpy array."""
+
+
+class NumpyCursorSV(NumpyCursorMixin, CursorSV):
+    """A cursor which returns results as a numpy array for C extension."""
+
+
+class PandasCursorMixin:
+    """Fetch methods for pandas DataFrames."""
+
+    def fetchone(self):
+        return results.results_to_pandas(
+            self.description, super().fetchone(), single=True, schema=self._schema,
+        )
+
+    def fetchall(self):
+        return results.results_to_pandas(
+            self.description, super().fetchall(), schema=self._schema,
+        )
+
+    def fetchall_unbuffered(self):
+        return results.results_to_pandas(
+            self.description, super().fetchall_unbuffered(), schema=self._schema,
+        )
+
+    def fetchmany(self, size=None):
+        return results.results_to_pandas(
+            self.description, super().fetchmany(size), schema=self._schema,
+        )
+
+
+class PandasCursor(PandasCursorMixin, Cursor):
+    """A cursor which returns results as a pandas DataFrame."""
+
+
+class PandasCursorSV(PandasCursorMixin, CursorSV):
+    """A cursor which returns results as a pandas DataFrame for C extension."""
+
+
+class PolarsCursorMixin:
+    """Fetch methods for polars DataFrames."""
+
+    def fetchone(self):
+        return results.results_to_polars(
+            self.description, super().fetchone(), single=True, schema=self._schema,
+        )
+
+    def fetchall(self):
+        return results.results_to_polars(
+            self.description, super().fetchall(), schema=self._schema,
+        )
+
+    def fetchall_unbuffered(self):
+        return results.results_to_polars(
+            self.description, super().fetchall_unbuffered(), schema=self._schema,
+        )
+
+    def fetchmany(self, size=None):
+        return results.results_to_polars(
+            self.description, super().fetchmany(size), schema=self._schema,
+        )
+
+
+class PolarsCursor(PolarsCursorMixin, Cursor):
+    """A cursor which returns results as a polars DataFrame."""
+
+
+class PolarsCursorSV(PolarsCursorMixin, CursorSV):
+    """A cursor which returns results as a polars DataFrame for C extension."""
+
+
 class DictCursorMixin:
     # You can override this to use OrderedDict or other dict-like types.
     dict_type = dict
@@ -698,16 +839,48 @@ class SSCursorSV(SSCursor):


 class SSDictCursor(DictCursorMixin, SSCursor):
-    """An unbuffered cursor, which returns results as a dictionary"""
+    """An unbuffered cursor, which returns results as a dictionary."""


 class SSDictCursorSV(SSCursorSV):
-    """An unbuffered cursor for the C extension, which returns a dictionary"""
+    """An unbuffered cursor for the C extension, which returns a dictionary."""


 class SSNamedtupleCursor(NamedtupleCursorMixin, SSCursor):
-    """An unbuffered cursor, which returns results as a named tuple"""
+    """An unbuffered cursor, which returns results as a named tuple."""


 class SSNamedtupleCursorSV(SSCursorSV):
-    """An unbuffered cursor for the C extension, which returns results as
+    """An unbuffered cursor for the C extension, which returns results as named tuple."""
+
+
+class SSArrowCursor(ArrowCursorMixin, SSCursor):
+    """An unbuffered cursor, which returns results as an Arrow Table."""
+
+
+class SSArrowCursorSV(ArrowCursorMixin, SSCursorSV):
+    """An unbuffered cursor, which returns results as an Arrow Table (accelerated)."""
+
+
+class SSNumpyCursor(NumpyCursorMixin, SSCursor):
+    """An unbuffered cursor, which returns results as a numpy array."""
+
+
+class SSNumpyCursorSV(NumpyCursorMixin, SSCursorSV):
+    """An unbuffered cursor, which returns results as a numpy array (accelerated)."""
+
+
+class SSPandasCursor(PandasCursorMixin, SSCursor):
+    """An unbuffered cursor, which returns results as a pandas DataFrame."""
+
+
+class SSPandasCursorSV(PandasCursorMixin, SSCursorSV):
+    """An unbuffered cursor, which returns results as a pandas DataFrame (accelerated)."""
+
+
+class SSPolarsCursor(PolarsCursorMixin, SSCursor):
+    """An unbuffered cursor, which returns results as a polars DataFrame."""
+
+
+class SSPolarsCursorSV(PolarsCursorMixin, SSCursorSV):
+    """An unbuffered cursor, which returns results as a polars DataFrame (accelerated)."""
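For context, each new cursor class pairs a buffered or unbuffered base cursor with a mixin that converts fetch results via `results_to_arrow`, `results_to_numpy`, `results_to_pandas`, or `results_to_polars`, using the schema cached from the result description. A hedged usage sketch follows; it assumes the `results_type` connection option (referenced above as `self.connection._results_type`) accepts a value such as `'arrow'` to select these cursors, and the host, credentials, and table name are placeholders.

```python
# Hedged usage sketch, not taken from the diff: assumes singlestoredb.connect()
# accepts a results_type option selecting the Arrow cursor variants added above.
import singlestoredb as s2

conn = s2.connect('user:password@localhost:3306/dbname', results_type='arrow')
with conn.cursor() as cur:
    cur.execute('SELECT id, name FROM example_table')  # example_table is a placeholder
    table = cur.fetchall()   # an Arrow Table rather than a list of tuples
    print(table.num_rows)
conn.close()
```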
singlestoredb/mysql/protocol.py
CHANGED
@@ -8,8 +8,10 @@ from . import err
 from ..config import get_option
 from ..utils.results import Description
 from .charset import MBLENGTH
+from .constants import EXTENDED_TYPE
 from .constants import FIELD_TYPE
 from .constants import SERVER_STATUS
+from .constants import VECTOR_TYPE


 DEBUG = get_option('debug.connection')
@@ -264,16 +266,63 @@ class FieldDescriptorPacket(MysqlPacket):
         self.org_table = self.read_length_coded_string().decode(encoding)
         self.name = self.read_length_coded_string().decode(encoding)
         self.org_name = self.read_length_coded_string().decode(encoding)
+        n_bytes = 0
         (
+            n_bytes,
             self.charsetnr,
             self.length,
             self.type_code,
             self.flags,
             self.scale,
-        ) = self.read_struct('<
+        ) = self.read_struct('<BHIBHBxx')
+
         # 'default' is a length coded binary and is still in the buffer?
         # not used for normal result sets...

+        # Extended types
+        if n_bytes > 12:
+            ext_type_code = self.read_uint8()
+            if ext_type_code == EXTENDED_TYPE.NONE:
+                pass
+            elif ext_type_code == EXTENDED_TYPE.BSON:
+                self.type_code = FIELD_TYPE.BSON
+            elif ext_type_code == EXTENDED_TYPE.VECTOR:
+                (self.length, vec_type) = self.read_struct('<IB')
+                if vec_type == VECTOR_TYPE.FLOAT32:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.FLOAT32_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.FLOAT32_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.FLOAT64:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.FLOAT64_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.FLOAT64_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.INT8:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.INT8_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.INT8_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.INT16:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.INT16_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.INT16_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.INT32:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.INT32_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.INT32_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.INT64:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.INT64_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.INT64_VECTOR_JSON
+                else:
+                    raise TypeError(f'unrecognized vector data type: {vec_type}')
+            else:
+                raise TypeError(f'unrecognized extended data type: {ext_type_code}')
+
     def description(self):
         """Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
         return Description(
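The rewritten unpack adds the length byte of the fixed field block (`n_bytes`, normally 12) to the tuple; when the server reports more than 12 bytes, the extra bytes carry SingleStore's extended type information (BSON, or a vector element type refined by the charset). The sketch below is not the library's API, just standalone struct math showing what the `'<BHIBHBxx'` format yields for a fabricated field-descriptor tail.

```python
# Illustrative decode of the fixed field-descriptor tail used above.
import struct

# Fabricated packet tail: n_bytes=12, charset 45 (utf8mb4), length=255,
# type 0xFD (VAR_STRING), flags 0, decimals 0, plus two filler bytes.
tail = struct.pack('<BHIBHBxx', 12, 45, 255, 0xFD, 0, 0)

n_bytes, charsetnr, length, type_code, flags, scale = struct.unpack('<BHIBHBxx', tail)
assert n_bytes == 12  # no extended-type bytes follow in this case
# When n_bytes > 12, the packet continues with one extended-type byte; for
# vectors, a further '<IB' pair carries the length and the element type.
print(charsetnr, length, hex(type_code))
```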
singlestoredb/notebook/__init__.py
ADDED
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+import os as _os
+import warnings as _warnings
+
+from ._objects import organization  # noqa: F401
+from ._objects import secrets  # noqa: F401
+from ._objects import stage  # noqa: F401
+from ._objects import workspace  # noqa: F401
+from ._objects import workspace_group  # noqa: F401
+
+if 'SINGLESTOREDB_ORGANIZATION' not in _os.environ:
+    _warnings.warn(
+        'This package is intended for use in the SingleStoreDB notebook environment',
+        RuntimeWarning,
+    )
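A short usage sketch of the new module, limited to what the diff itself defines (secret names below are hypothetical); outside the SingleStoreDB notebook environment the import merely raises the `RuntimeWarning` shown above.

```python
# Hedged usage sketch of singlestoredb.notebook as added in this release.
from singlestoredb import notebook

token = notebook.secrets.MY_TOKEN     # attribute access -> get_secret('MY_TOKEN')
token = notebook.secrets['MY_TOKEN']  # item access works the same way
print(notebook.organization)          # repr of the currently selected organization
```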
singlestoredb/notebook/_objects.py
ADDED
@@ -0,0 +1,212 @@
+#!/usr/bin/env python
+import functools
+from typing import Any
+
+from ..management import workspace as _ws
+
+
+class Secrets(object):
+    """Wrapper for accessing secrets as object attributes."""
+
+    def __getattr__(self, name: str) -> str:
+        if name.startswith('_ipython') or name.startswith('_repr_'):
+            raise AttributeError(name)
+        return _ws.get_secret(name)
+
+    def __getitem__(self, name: str) -> str:
+        return _ws.get_secret(name)
+
+
+class Stage(object):
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        # We are remapping the methods and attributes here so that
+        # autocomplete still works in Jupyter / IPython, but we
+        # bypass the real method / attribute calls and apply them
+        # to the currently selected stage.
+        for name in [x for x in dir(_ws.Stage) if not x.startswith('_')]:
+            if name in ['from_dict', 'refresh', 'update']:
+                continue
+            attr = getattr(_ws.Stage, name)
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                if is_method:
+                    def wrap(self: Stage, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_stage(), m)(*a, **kw)
+                    return functools.update_wrapper(wrap, attr)
+                else:
+                    def wrap(self: Stage, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_stage(), m)
+                    return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name, is_method=callable(attr)))
+
+        for name in [
+            x for x in _ws.Stage.__annotations__.keys()
+            if not x.startswith('_')
+        ]:
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                def wrap(self: Stage) -> Any:
+                    return getattr(_ws.get_stage(), m)
+                return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name))
+
+        cls.__doc__ = _ws.Stage.__doc__
+
+        return super().__new__(cls, *args, **kwargs)
+
+
+class WorkspaceGroup(object):
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        # We are remapping the methods and attributes here so that
+        # autocomplete still works in Jupyter / IPython, but we
+        # bypass the real method / attribute calls and apply them
+        # to the currently selected workspace group.
+        for name in [x for x in dir(_ws.WorkspaceGroup) if not x.startswith('_')]:
+            if name in ['from_dict', 'refresh', 'update']:
+                continue
+
+            attr = getattr(_ws.WorkspaceGroup, name)
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                if is_method:
+                    def wrap(self: WorkspaceGroup, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_workspace_group(), m)(*a, **kw)
+                    return functools.update_wrapper(wrap, attr)
+                else:
+                    def wrap(self: WorkspaceGroup, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_workspace_group(), m)
+                    return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name, is_method=callable(attr)))
+
+        for name in [
+            x for x in _ws.WorkspaceGroup.__annotations__.keys()
+            if not x.startswith('_')
+        ]:
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                def wrap(self: WorkspaceGroup) -> Any:
+                    return getattr(_ws.get_workspace_group(), m)
+                return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name))
+
+        cls.__doc__ = _ws.WorkspaceGroup.__doc__
+
+        return super().__new__(cls, *args, **kwargs)
+
+    def __str__(self) -> str:
+        return _ws.get_workspace_group().__str__()
+
+    def __repr__(self) -> str:
+        return _ws.get_workspace_group().__repr__()
+
+
+class Workspace(object):
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        # We are remapping the methods and attributes here so that
+        # autocomplete still works in Jupyter / IPython, but we
+        # bypass the real method / attribute calls and apply them
+        # to the currently selected workspace.
+        for name in [x for x in dir(_ws.Workspace) if not x.startswith('_')]:
+            if name in ['from_dict', 'refresh', 'update']:
+                continue
+
+            attr = getattr(_ws.Workspace, name)
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                if is_method:
+                    def wrap(self: Workspace, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_workspace(), m)(*a, **kw)
+                    return functools.update_wrapper(wrap, attr)
+                else:
+                    def wrap(self: Workspace, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_workspace(), m)
+                    return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name, is_method=callable(attr)))
+
+        for name in [
+            x for x in _ws.Workspace.__annotations__.keys()
+            if not x.startswith('_')
+        ]:
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                def wrap(self: Workspace) -> Any:
+                    return getattr(_ws.get_workspace(), m)
+                return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name))
+
+        cls.__doc__ = _ws.Workspace.__doc__
+
+        return super().__new__(cls, *args, **kwargs)
+
+    def __str__(self) -> str:
+        return _ws.get_workspace().__str__()
+
+    def __repr__(self) -> str:
+        return _ws.get_workspace().__repr__()
+
+
+class Organization(object):
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        # We are remapping the methods and attributes here so that
+        # autocomplete still works in Jupyter / IPython, but we
+        # bypass the real method / attribute calls and apply them
+        # to the currently selected organization.
+        for name in [x for x in dir(_ws.Organization) if not x.startswith('_')]:
+            if name in ['from_dict', 'refresh', 'update']:
+                continue
+
+            attr = getattr(_ws.Organization, name)
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                if is_method:
+                    def wrap(self: Organization, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_organization(), m)(*a, **kw)
+                    return functools.update_wrapper(wrap, attr)
+                else:
+                    def wrap(self: Organization, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_organization(), m)
+                    return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name, is_method=callable(attr)))
+
+        for name in [
+            x for x in _ws.Organization.__annotations__.keys()
+            if not x.startswith('_')
+        ]:
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                def wrap(self: Organization) -> Any:
+                    return getattr(_ws.get_organization(), m)
+                return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name))
+
+        cls.__doc__ = _ws.Organization.__doc__
+
+        return super().__new__(cls, *args, **kwargs)
+
+    def __str__(self) -> str:
+        return _ws.get_organization().__str__()
+
+    def __repr__(self) -> str:
+        return _ws.get_organization().__repr__()
+
+
+secrets = Secrets()
+stage = Stage()
+organization = Organization()
+workspace_group = WorkspaceGroup()
+workspace = Workspace()
+
+
+__all__ = ['secrets', 'stage', 'workspace', 'workspace_group', 'organization']
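The Stage, WorkspaceGroup, Workspace, and Organization classes above all repeat one pattern: mirror the public attribute names of a management class onto a proxy so that Jupyter / IPython autocomplete works, while every access is forwarded to a freshly resolved target object. The following self-contained sketch illustrates that pattern; `Account`, `get_account()`, and `AccountProxy` are made up for illustration and are not part of the package.

```python
# Minimal sketch of the proxy pattern used by _objects.py (hypothetical names).
import functools


class Account:
    """Stand-in for a management object such as _ws.Workspace."""

    name: str = 'demo'

    def ping(self) -> str:
        return f'pong from {self.name}'


def get_account() -> Account:
    # In the real module this would look up the currently selected object.
    return Account()


class AccountProxy:
    def __new__(cls, *args, **kwargs):
        # Copy public names onto the proxy class so autocomplete sees them,
        # but forward every call/lookup to a freshly resolved Account.
        for attr_name in [x for x in dir(Account) if not x.startswith('_')]:
            attr = getattr(Account, attr_name)

            def make_wrapper(m, is_method):
                if is_method:
                    def wrap(self, *a, **kw):
                        return getattr(get_account(), m)(*a, **kw)
                    return functools.update_wrapper(wrap, attr)

                def wrap(self):
                    return getattr(get_account(), m)
                return property(wrap)

            setattr(cls, attr_name, make_wrapper(attr_name, callable(attr)))
        return super().__new__(cls)


account = AccountProxy()
print(account.ping())   # forwarded to a fresh Account instance
print(account.name)     # property access, also forwarded
```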