databricks-sdk 0.20.0__tar.gz → 0.21.0__tar.gz

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of databricks-sdk might be problematic.
Files changed (61)
  1. {databricks-sdk-0.20.0/databricks_sdk.egg-info → databricks-sdk-0.21.0}/PKG-INFO +2 -1
  2. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/README.md +1 -0
  3. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/__init__.py +21 -6
  4. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/_widgets/__init__.py +2 -2
  5. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/config.py +3 -2
  6. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/oauth.py +1 -1
  7. databricks-sdk-0.21.0/databricks/sdk/runtime/__init__.py +182 -0
  8. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/runtime/dbutils_stub.py +1 -1
  9. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/_internal.py +1 -1
  10. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/billing.py +42 -0
  11. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/catalog.py +245 -44
  12. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/compute.py +334 -13
  13. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/dashboards.py +14 -0
  14. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/files.py +154 -12
  15. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/iam.py +161 -0
  16. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/jobs.py +95 -8
  17. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/ml.py +350 -0
  18. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/oauth2.py +70 -0
  19. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/pipelines.py +66 -8
  20. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/provisioning.py +78 -36
  21. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/serving.py +28 -0
  22. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/settings.py +1292 -203
  23. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/sharing.py +56 -0
  24. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/sql.py +138 -11
  25. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/vectorsearch.py +95 -60
  26. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/workspace.py +141 -1
  27. databricks-sdk-0.21.0/databricks/sdk/version.py +1 -0
  28. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0/databricks_sdk.egg-info}/PKG-INFO +2 -1
  29. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks_sdk.egg-info/SOURCES.txt +0 -1
  30. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks_sdk.egg-info/requires.txt +1 -0
  31. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/setup.py +2 -1
  32. databricks-sdk-0.20.0/databricks/sdk/runtime/__init__.py +0 -108
  33. databricks-sdk-0.20.0/databricks/sdk/runtime/stub.py +0 -48
  34. databricks-sdk-0.20.0/databricks/sdk/version.py +0 -1
  35. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/LICENSE +0 -0
  36. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/NOTICE +0 -0
  37. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/__init__.py +0 -0
  38. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  39. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  40. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/azure.py +0 -0
  41. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/casing.py +0 -0
  42. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/clock.py +0 -0
  43. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/core.py +0 -0
  44. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/credentials_provider.py +0 -0
  45. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/dbutils.py +0 -0
  46. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/environments.py +0 -0
  47. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/errors/__init__.py +0 -0
  48. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/errors/base.py +0 -0
  49. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/errors/mapper.py +0 -0
  50. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/errors/platform.py +0 -0
  51. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/errors/sdk.py +0 -0
  52. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/mixins/__init__.py +0 -0
  53. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/mixins/compute.py +0 -0
  54. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/mixins/files.py +0 -0
  55. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/mixins/workspace.py +0 -0
  56. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/py.typed +0 -0
  57. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/retries.py +0 -0
  58. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks/sdk/service/__init__.py +0 -0
  59. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  60. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  61. {databricks-sdk-0.20.0 → databricks-sdk-0.21.0}/setup.cfg +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: databricks-sdk
- Version: 0.20.0
+ Version: 0.21.0
  Summary: Databricks SDK for Python (Beta)
  Home-page: https://databricks-sdk-py.readthedocs.io
  Author: Serge Smertin
@@ -34,6 +34,7 @@ License-File: NOTICE
  [![databricks-sdk](https://snyk.io/advisor/python/databricks-sdk/badge.svg)](https://snyk.io/advisor/python/databricks-sdk)
  ![PyPI](https://img.shields.io/pypi/v/databricks-sdk)
  [![codecov](https://codecov.io/gh/databricks/databricks-sdk-py/branch/main/graph/badge.svg?token=GU63K7WDBE)](https://codecov.io/gh/databricks/databricks-sdk-py)
+ [![lines of code](https://tokei.rs/b1/github/databricks/databricks-sdk-py)]([https://codecov.io/github/databricks/databricks-sdk-py](https://github.com/databricks/databricks-sdk-py))

  [Beta](https://docs.databricks.com/release-notes/release-types.html): This SDK is supported for production use cases,
  but we do expect future releases to have some interface changes; see [Interface stability](#interface-stability).
README.md
@@ -5,6 +5,7 @@
  [![databricks-sdk](https://snyk.io/advisor/python/databricks-sdk/badge.svg)](https://snyk.io/advisor/python/databricks-sdk)
  ![PyPI](https://img.shields.io/pypi/v/databricks-sdk)
  [![codecov](https://codecov.io/gh/databricks/databricks-sdk-py/branch/main/graph/badge.svg?token=GU63K7WDBE)](https://codecov.io/gh/databricks/databricks-sdk-py)
+ [![lines of code](https://tokei.rs/b1/github/databricks/databricks-sdk-py)]([https://codecov.io/github/databricks/databricks-sdk-py](https://github.com/databricks/databricks-sdk-py))

  [Beta](https://docs.databricks.com/release-notes/release-types.html): This SDK is supported for production use cases,
  but we do expect future releases to have some interface changes; see [Interface stability](#interface-stability).
databricks/sdk/__init__.py
@@ -34,9 +34,9 @@ from databricks.sdk.service.iam import (AccountAccessControlAPI,
                                          AccountGroupsAPI,
                                          AccountServicePrincipalsAPI,
                                          AccountUsersAPI, CurrentUserAPI,
-                                         GroupsAPI, PermissionsAPI,
-                                         ServicePrincipalsAPI, UsersAPI,
-                                         WorkspaceAssignmentAPI)
+                                         GroupsAPI, PermissionMigrationAPI,
+                                         PermissionsAPI, ServicePrincipalsAPI,
+                                         UsersAPI, WorkspaceAssignmentAPI)
  from databricks.sdk.service.jobs import JobsAPI
  from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI
  from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI,
@@ -52,9 +52,17 @@ from databricks.sdk.service.provisioning import (CredentialsAPI,
  from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI
  from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
                                               AccountSettingsAPI,
+                                              AutomaticClusterUpdateAPI,
                                               CredentialsManagerAPI,
+                                              CspEnablementAccountAPI,
+                                              CspEnablementAPI,
+                                              DefaultNamespaceAPI,
+                                              EsmEnablementAccountAPI,
+                                              EsmEnablementAPI,
                                               IpAccessListsAPI,
                                               NetworkConnectivityAPI,
+                                              PersonalComputeAPI,
+                                              RestrictWorkspaceAdminsAPI,
                                               SettingsAPI, TokenManagementAPI,
                                               TokensAPI, WorkspaceConfAPI)
  from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
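The settings surface grows considerably in this release. A minimal sketch of reading one of the new workspace-level settings follows; the nested accessor name `default_namespace` is an assumption inferred from the imported class names, so check the generated SettingsAPI for the exact attribute:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Assumed accessor; DefaultNamespaceAPI is one of the newly imported classes.
    setting = w.settings.default_namespace.get()
    print(setting)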
@@ -182,6 +190,7 @@ class WorkspaceClient:
          self._model_registry = ModelRegistryAPI(self._api_client)
          self._model_versions = ModelVersionsAPI(self._api_client)
          self._online_tables = OnlineTablesAPI(self._api_client)
+         self._permission_migration = PermissionMigrationAPI(self._api_client)
          self._permissions = PermissionsAPI(self._api_client)
          self._pipelines = PipelinesAPI(self._api_client)
          self._policy_families = PolicyFamiliesAPI(self._api_client)
@@ -324,7 +333,7 @@ class WorkspaceClient:

      @property
      def files(self) -> FilesAPI:
-         """The Files API allows you to read, write, list, and delete files and directories."""
+         """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI."""
          return self._files

      @property
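The updated docstring reflects that FilesAPI addresses files by URI-style paths. A short sketch of the read/write surface, assuming a Unity Catalog volume path that exists in your workspace:

    import io
    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    path = '/Volumes/main/default/my_volume/hello.txt'  # hypothetical volume path
    w.files.upload(path, io.BytesIO(b'hello world'), overwrite=True)
    resp = w.files.download(path)
    print(resp.contents.read())
    w.files.delete(path)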
@@ -407,6 +416,11 @@ class WorkspaceClient:
          """Online tables provide lower latency and higher QPS access to data from Delta tables."""
          return self._online_tables

+     @property
+     def permission_migration(self) -> PermissionMigrationAPI:
+         """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx."""
+         return self._permission_migration
+
      @property
      def permissions(self) -> PermissionsAPI:
          """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints."""
@@ -484,7 +498,7 @@ class WorkspaceClient:

      @property
      def settings(self) -> SettingsAPI:
-         """The default namespace setting API allows users to configure the default namespace for a Databricks workspace."""
+         """Workspace Settings API allows users to manage settings at the workspace level."""
          return self._settings

      @property
@@ -761,7 +775,7 @@ class AccountClient:

      @property
      def settings(self) -> AccountSettingsAPI:
-         """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources."""
+         """Accounts Settings API allows users to manage settings at the account level."""
          return self._settings

      @property
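The account-level settings mirror the workspace-level restructuring: settings that used to hang directly off AccountSettingsAPI, such as Personal Compute, now read as nested APIs. A sketch, assuming the nested accessor is named `personal_compute`:

    from databricks.sdk import AccountClient

    a = AccountClient()
    # Assumed nested accessor for the relocated Personal Compute setting.
    setting = a.settings.personal_compute.get()
    print(setting.personal_compute.value)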
@@ -820,6 +834,7 @@ class AccountClient:
          config.host = config.environment.deployment_url(workspace.deployment_name)
          config.azure_workspace_resource_id = azure.get_azure_resource_id(workspace)
          config.account_id = None
+         config.init_auth()
          return WorkspaceClient(config=config)

      def __repr__(self):
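The added `config.init_auth()` matters because `get_workspace_client` clones the account config and repoints it at a workspace host, so credentials must be re-resolved against that host before the client is handed back:

    from databricks.sdk import AccountClient

    a = AccountClient()
    ws = next(a.workspaces.list())       # pick any workspace in the account
    w = a.get_workspace_client(ws)       # auth is re-initialized for the workspace host
    print(w.current_user.me().user_name)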
databricks/sdk/_widgets/__init__.py
@@ -13,11 +13,11 @@ class WidgetUtils(ABC):
      def _get(self, name: str) -> str:
          pass

-     def getArgument(self, name: str, default_value: typing.Optional[str] = None):
+     def getArgument(self, name: str, defaultValue: typing.Optional[str] = None):
          try:
              return self.get(name)
          except Exception:
-             return default_value
+             return defaultValue

      def remove(self, name: str):
          self._remove(name)
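Renaming `default_value` to `defaultValue` aligns the helper with the camelCase signature of `dbutils.widgets.getArgument` in Databricks Runtime, so keyword callers behave the same in both environments:

    from databricks.sdk.runtime import dbutils

    # Works both locally (via the SDK's dbutils) and in a notebook:
    value = dbutils.widgets.getArgument('run_date', defaultValue='2024-01-01')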
databricks/sdk/config.py
@@ -87,6 +87,7 @@ class Config:
                   product_version="0.0.0",
                   clock: Clock = None,
                   **kwargs):
+         self._header_factory = None
          self._inner = {}
          self._user_agent_other_info = []
          self._credentials_provider = credentials_provider if credentials_provider else DefaultCredentials()
@@ -100,7 +101,7 @@ class Config:
              self._known_file_config_loader()
              self._fix_host_if_needed()
              self._validate()
-             self._init_auth()
+             self.init_auth()
              self._product = product
              self._product_version = product_version
          except ValueError as e:
@@ -436,7 +437,7 @@ class Config:
          names = " and ".join(sorted(auths_used))
          raise ValueError(f'validate: more than one authorization method configured: {names}')

-     def _init_auth(self):
+     def init_auth(self):
          try:
              self._header_factory = self._credentials_provider(self)
              self.auth_type = self._credentials_provider.auth_type()
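Making `_init_auth` public (and pre-seeding `_header_factory` in `__init__`) lets callers such as `get_workspace_client` mutate a Config and then re-run credential resolution. A sketch, assuming credentials are available in the environment:

    from databricks.sdk.core import Config

    cfg = Config()                        # resolves credentials once at construction
    cfg.host = 'https://other-workspace.cloud.databricks.com'
    cfg.init_auth()                       # re-resolves credentials for the new host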
databricks/sdk/oauth.py
@@ -366,7 +366,7 @@ class OAuthClient:

          config = Config(host=host, credentials_provider=noop_credentials)
          if not scopes:
-             scopes = ['offline_access', 'clusters', 'sql']
+             scopes = ['all-apis']
          if config.is_azure:
              # Azure AD only supports full access to Azure Databricks.
              scopes = [f'{config.effective_azure_login_app_id}/user_impersonation', 'offline_access']
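Defaulting to the `all-apis` scope matches how Databricks OAuth apps are registered, replacing the older per-service scopes. A sketch of constructing the client with the new default; the client ID and redirect URL are placeholders:

    from databricks.sdk.oauth import OAuthClient

    oauth_client = OAuthClient(host='https://example.cloud.databricks.com',
                               client_id='my-oauth-app',             # placeholder
                               redirect_url='http://localhost:8020')
    # scopes was not passed, so it defaults to ['all-apis']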
databricks/sdk/runtime/__init__.py (new file)
@@ -0,0 +1,182 @@
+ from __future__ import annotations
+
+ import logging
+ from typing import Dict, Optional, Union, cast
+
+ logger = logging.getLogger('databricks.sdk')
+ is_local_implementation = True
+
+ # All objects that are injected into the Notebook's user namespace should also be made
+ # available to be imported from databricks.sdk.runtime.globals. This import can be used
+ # in Python modules so users can access these objects from Files more easily.
+ dbruntime_objects = [
+     "display", "displayHTML", "dbutils", "table", "sql", "udf", "getArgument", "sc", "sqlContext", "spark"
+ ]
+
+ # DO NOT MOVE THE TRY-CATCH BLOCK BELOW AND DO NOT ADD THINGS BEFORE IT! WILL MAKE TEST FAIL.
+ try:
+     # We don't want to expose additional entity to user namespace, so
+     # a workaround here for exposing required information in notebook environment
+     from dbruntime.sdk_credential_provider import init_runtime_native_auth
+     logger.debug('runtime SDK credential provider available')
+     dbruntime_objects.append("init_runtime_native_auth")
+ except ImportError:
+     init_runtime_native_auth = None
+
+ globals()["init_runtime_native_auth"] = init_runtime_native_auth
+
+
+ def init_runtime_repl_auth():
+     try:
+         from dbruntime.databricks_repl_context import get_context
+         ctx = get_context()
+         if ctx is None:
+             logger.debug('Empty REPL context returned, skipping runtime auth')
+             return None, None
+         if ctx.workspaceUrl is None:
+             logger.debug('Workspace URL is not available, skipping runtime auth')
+             return None, None
+         host = f'https://{ctx.workspaceUrl}'
+
+         def inner() -> Dict[str, str]:
+             ctx = get_context()
+             return {'Authorization': f'Bearer {ctx.apiToken}'}
+
+         return host, inner
+     except ImportError:
+         return None, None
+
+
+ def init_runtime_legacy_auth():
+     try:
+         import IPython
+         ip_shell = IPython.get_ipython()
+         if ip_shell is None:
+             return None, None
+         global_ns = ip_shell.ns_table["user_global"]
+         if 'dbutils' not in global_ns:
+             return None, None
+         dbutils = global_ns["dbutils"].notebook.entry_point.getDbutils()
+         if dbutils is None:
+             return None, None
+         ctx = dbutils.notebook().getContext()
+         if ctx is None:
+             return None, None
+         host = getattr(ctx, 'apiUrl')().get()
+
+         def inner() -> Dict[str, str]:
+             ctx = dbutils.notebook().getContext()
+             return {'Authorization': f'Bearer {getattr(ctx, "apiToken")().get()}'}
+
+         return host, inner
+     except ImportError:
+         return None, None
+
+
+ try:
+     # Internal implementation
+     # Separated from above for backward compatibility
+     from dbruntime import UserNamespaceInitializer
+
+     userNamespaceGlobals = UserNamespaceInitializer.getOrCreate().get_namespace_globals()
+     _globals = globals()
+     for var in dbruntime_objects:
+         if var not in userNamespaceGlobals:
+             continue
+         _globals[var] = userNamespaceGlobals[var]
+     is_local_implementation = False
+ except ImportError:
+     # OSS implementation
+     is_local_implementation = True
+
+     for var in dbruntime_objects:
+         globals()[var] = None
+
+     # The next few try-except blocks are for initialising globals in a best effort
+     # mannaer. We separate them to try to get as many of them working as possible
+     try:
+         # We expect this to fail and only do this for providing types
+         from pyspark.sql.context import SQLContext
+         sqlContext: SQLContext = None  # type: ignore
+         table = sqlContext.table
+     except Exception as e:
+         logging.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")
+
+     try:
+         from pyspark.sql.functions import udf  # type: ignore
+     except ImportError as e:
+         logging.debug(f"Failed to initialise udf global: {e}")
+
+     try:
+         from databricks.connect import DatabricksSession  # type: ignore
+         spark = DatabricksSession.builder.getOrCreate()
+         sql = spark.sql  # type: ignore
+     except Exception as e:
+         # We are ignoring all failures here because user might want to initialize
+         # spark session themselves and we don't want to interfere with that
+         logging.debug(f"Failed to initialize globals 'spark' and 'sql', continuing. Cause: {e}")
+
+     try:
+         # We expect this to fail locally since dbconnect does not support sparkcontext. This is just for typing
+         sc = spark.sparkContext
+     except Exception as e:
+         logging.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")
+
+     def display(input=None, *args, **kwargs) -> None:  # type: ignore
+         """
+         Display plots or data.
+         Display plot:
+         - display() # no-op
+         - display(matplotlib.figure.Figure)
+         Display dataset:
+         - display(spark.DataFrame)
+         - display(list) # if list can be converted to DataFrame, e.g., list of named tuples
+         - display(pandas.DataFrame)
+         - display(koalas.DataFrame)
+         - display(pyspark.pandas.DataFrame)
+         Display any other value that has a _repr_html_() method
+         For Spark 2.0 and 2.1:
+         - display(DataFrame, streamName='optional', trigger=optional pyspark.sql.streaming.Trigger,
+                   checkpointLocation='optional')
+         For Spark 2.2+:
+         - display(DataFrame, streamName='optional', trigger=optional interval like '1 second',
+                   checkpointLocation='optional')
+         """
+         # Import inside the function so that imports are only triggered on usage.
+         from IPython import display as IPDisplay
+         return IPDisplay.display(input, *args, **kwargs)  # type: ignore
+
+     def displayHTML(html) -> None:  # type: ignore
+         """
+         Display HTML data.
+         Parameters
+         ----------
+         data : URL or HTML string
+             If data is a URL, display the resource at that URL, the resource is loaded dynamically by the browser.
+             Otherwise data should be the HTML to be displayed.
+         See also:
+         IPython.display.HTML
+         IPython.display.display_html
+         """
+         # Import inside the function so that imports are only triggered on usage.
+         from IPython import display as IPDisplay
+         return IPDisplay.display_html(html, raw=True)  # type: ignore
+
+     # We want to propagate the error in initialising dbutils because this is a core
+     # functionality of the sdk
+     from databricks.sdk.dbutils import RemoteDbUtils
+
+     from . import dbutils_stub
+     dbutils_type = Union[dbutils_stub.dbutils, RemoteDbUtils]
+
+     dbutils = RemoteDbUtils()
+     dbutils = cast(dbutils_type, dbutils)
+
+     # We do this to prevent importing widgets implementation prematurely
+     # The widget import should prompt users to use the implementation
+     # which has ipywidget support.
+     def getArgument(name: str, defaultValue: Optional[str] = None):
+         return dbutils.widgets.getArgument(name, defaultValue)
+
+
+ __all__ = dbruntime_objects
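The net effect of the rewritten module: inside a Databricks notebook the `dbruntime` imports succeed and the real runtime objects are re-exported; everywhere else the OSS branch wires up best-effort substitutes (Databricks Connect for `spark`, RemoteDbUtils for `dbutils`). A minimal local sketch:

    from databricks.sdk.runtime import dbutils, is_local_implementation

    print(is_local_implementation)       # True outside a Databricks notebook
    for info in dbutils.fs.ls('/'):      # served by RemoteDbUtils over the REST API
        print(info.path)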
databricks/sdk/runtime/dbutils_stub.py
@@ -288,7 +288,7 @@ class dbutils:
          ...

      @staticmethod
-     def getArgument(name: str, defaultValue: typing.Optional[str] = None) -> str:
+     def getArgument(name: str, defaultValue: typing.Optional[str] = None) -> typing.Optional[str]:
          """Returns the current value of a widget with give name.
          :param name: Name of the argument to be accessed
          :param defaultValue: (Deprecated) default value
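Widening the stub's return type to `typing.Optional[str]` matches the actual behavior, since the implementation returns `defaultValue` (possibly None) when the widget is missing. Type-checkers now force callers to handle that case:

    val = dbutils.widgets.getArgument('maybe_missing')
    if val is not None:                  # required now that the stub returns Optional[str]
        print(val.upper())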
databricks/sdk/service/_internal.py
@@ -3,7 +3,7 @@ from typing import Callable, Dict, Generic, Optional, Type, TypeVar


  def _from_dict(d: Dict[str, any], field: str, cls: Type) -> any:
-     if field not in d or not d[field]:
+     if field not in d or d[field] is None:
          return None
      return getattr(cls, 'from_dict')(d[field])

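The old falsy check silently dropped present-but-empty payloads such as `{}`, `0`, or `''`; the `is None` check only skips genuinely absent values. A self-contained sketch of the difference, using a stand-in `Empty` class:

    from typing import Dict

    class Empty:

        @classmethod
        def from_dict(cls, d: Dict[str, any]) -> 'Empty':
            return cls()

    def _from_dict(d, field, cls):
        if field not in d or d[field] is None:    # was: `not d[field]`
            return None
        return getattr(cls, 'from_dict')(d[field])

    assert isinstance(_from_dict({'x': {}}, 'x', Empty), Empty)  # previously returned None
    assert _from_dict({'x': None}, 'x', Empty) is None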
databricks/sdk/service/billing.py
@@ -315,6 +315,20 @@ class CreateLogDeliveryConfigurationParams:
                     workspace_ids_filter=d.get('workspace_ids_filter', None))


+ @dataclass
+ class DeleteResponse:
+
+     def as_dict(self) -> dict:
+         """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
+         body = {}
+         return body
+
+     @classmethod
+     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
+         """Deserializes the DeleteResponse from a dictionary."""
+         return cls()
+
+
  class DeliveryStatus(Enum):
      """The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery
      attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has
@@ -557,6 +571,20 @@ class OutputFormat(Enum):
      JSON = 'JSON'


+ @dataclass
+ class PatchStatusResponse:
+
+     def as_dict(self) -> dict:
+         """Serializes the PatchStatusResponse into a dictionary suitable for use as a JSON request body."""
+         body = {}
+         return body
+
+     @classmethod
+     def from_dict(cls, d: Dict[str, any]) -> PatchStatusResponse:
+         """Deserializes the PatchStatusResponse from a dictionary."""
+         return cls()
+
+
  @dataclass
  class UpdateLogDeliveryConfigurationStatusRequest:
      status: LogDeliveryConfigStatus
@@ -583,6 +611,20 @@ class UpdateLogDeliveryConfigurationStatusRequest:
                     status=_enum(d, 'status', LogDeliveryConfigStatus))


+ @dataclass
+ class UpdateResponse:
+
+     def as_dict(self) -> dict:
+         """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
+         body = {}
+         return body
+
+     @classmethod
+     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
+         """Deserializes the UpdateResponse from a dictionary."""
+         return cls()
+
+
  @dataclass
  class WrappedBudget:
      budget: Budget
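These empty response types (DeleteResponse, PatchStatusResponse, UpdateResponse) give previously body-less billing endpoints a concrete, round-trippable return value:

    from databricks.sdk.service.billing import DeleteResponse

    resp = DeleteResponse.from_dict({})
    assert resp.as_dict() == {}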