databricks-sdk 0.40.0__py3-none-any.whl → 0.42.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.


databricks/sdk/_base_client.py CHANGED
@@ -1,6 +1,7 @@
 import io
 import logging
 import urllib.parse
+from abc import ABC, abstractmethod
 from datetime import timedelta
 from types import TracebackType
 from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
@@ -158,16 +159,29 @@ class _BaseClient:
         if isinstance(data, (str, bytes)):
             data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data)
 
-        # Only retry if the request is not a stream or if the stream is seekable and
-        # we can rewind it. This is necessary to avoid bugs where the retry doesn't
-        # re-read already read data from the body.
-        if data is not None and not self._is_seekable_stream(data):
-            logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
-            call = self._perform
-        else:
+        if not data:
+            # The request is not a stream.
             call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
                            is_retryable=self._is_retryable,
                            clock=self._clock)(self._perform)
+        elif self._is_seekable_stream(data):
+            # Keep track of the initial position of the stream so that we can rewind to it
+            # if we need to retry the request.
+            initial_data_position = data.tell()
+
+            def rewind():
+                logger.debug(f"Rewinding input data to offset {initial_data_position} before retry")
+                data.seek(initial_data_position)
+
+            call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
+                           is_retryable=self._is_retryable,
+                           clock=self._clock,
+                           before_retry=rewind)(self._perform)
+        else:
+            # Do not retry if the stream is not seekable. This is necessary to avoid bugs
+            # where the retry doesn't re-read already read data from the stream.
+            logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
+            call = self._perform
 
         response = call(method,
                         url,
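
The rewind-before-retry logic above replaces the previous all-or-nothing check: requests without a body stay retryable, seekable streams are rewound to their starting offset before every retry, and only non-seekable streams fall back to a single attempt. A minimal standalone sketch of the same pattern (an illustration only, not the SDK's code; send is a hypothetical flaky operation):

    import io

    def send(body: io.BytesIO) -> None:
        body.read()                         # consumes the stream
        raise ConnectionError("transient")  # pretend the request failed

    data = io.BytesIO(b"file contents")
    initial_position = data.tell()  # remember where the body starts

    def rewind() -> None:
        # Without this, a retry would read from the end of the already-consumed
        # stream and send an empty request body.
        data.seek(initial_position)

    for _ in range(3):
        try:
            send(data)
            break
        except ConnectionError:
            rewind()  # what before_retry=rewind does inside retried()
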
@@ -248,12 +262,6 @@ class _BaseClient:
                  files=None,
                  data=None,
                  auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
-        # Keep track of the initial position of the stream so that we can rewind it if
-        # we need to retry the request.
-        initial_data_position = 0
-        if self._is_seekable_stream(data):
-            initial_data_position = data.tell()
-
         response = self._session.request(method,
                                          url,
                                          params=self._fix_query_string(query),
@@ -265,16 +273,8 @@ class _BaseClient:
                                          stream=raw,
                                          timeout=self._http_timeout_seconds)
         self._record_request_log(response, raw=raw or data is not None or files is not None)
-
         error = self._error_parser.get_api_error(response)
         if error is not None:
-            # If the request body is a seekable stream, rewind it so that it is ready
-            # to be read again in case of a retry.
-            #
-            # TODO: This should be moved into a "before-retry" hook to avoid one
-            # unnecessary seek on the last failed retry before aborting.
-            if self._is_seekable_stream(data):
-                data.seek(initial_data_position)
             raise error from None
 
         return response
@@ -285,8 +285,20 @@ class _BaseClient:
         logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())
 
 
+class _RawResponse(ABC):
+
+    @abstractmethod
+    # follows Response signature: https://github.com/psf/requests/blob/main/src/requests/models.py#L799
+    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
+        pass
+
+    @abstractmethod
+    def close(self):
+        pass
+
+
 class _StreamingResponse(BinaryIO):
-    _response: requests.Response
+    _response: _RawResponse
     _buffer: bytes
     _content: Union[Iterator[bytes], None]
     _chunk_size: Union[int, None]
@@ -298,7 +310,7 @@ class _StreamingResponse(BinaryIO):
     def flush(self) -> int:
         pass
 
-    def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None):
+    def __init__(self, response: _RawResponse, chunk_size: Union[int, None] = None):
         self._response = response
         self._buffer = b''
         self._content = None
@@ -308,7 +320,7 @@ class _StreamingResponse(BinaryIO):
         if self._closed:
             raise ValueError("I/O operation on closed file")
         if not self._content:
-            self._content = self._response.iter_content(chunk_size=self._chunk_size)
+            self._content = self._response.iter_content(chunk_size=self._chunk_size, decode_unicode=False)
 
     def __enter__(self) -> BinaryIO:
         self._open()
databricks/sdk/config.py CHANGED
@@ -92,6 +92,11 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
+    enable_experimental_files_api_client: bool = ConfigAttribute(
+        env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
+    files_api_client_download_max_total_recovers = None
+    files_api_client_download_max_total_recovers_without_progressing = 1
+
     def __init__(
         self,
         *,
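
These new attributes gate and tune the resilient Files API client added in the files mixin below: the flag can also be set through the DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT environment variable, and the two recover limits bound how many times a broken download may be resumed, in total and without making progress. A sketch of opting in from code, assuming Config accepts the attribute as a keyword like other ConfigAttributes:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.config import Config

    # Equivalent to exporting DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=true.
    cfg = Config(enable_experimental_files_api_client=True)
    w = WorkspaceClient(config=cfg)
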
@@ -167,6 +167,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]:
     oidc = cfg.oidc_endpoints
     if oidc is None:
         return None
+
     token_source = ClientCredentials(client_id=cfg.client_id,
                                      client_secret=cfg.client_secret,
                                      token_url=oidc.token_endpoint,
@@ -187,6 +188,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
+
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
@@ -194,12 +196,11 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     elif cfg.azure_client_id:
         client_id = cfg.azure_client
         client_secret = cfg.azure_client_secret
-
     if not client_id:
         client_id = 'databricks-cli'
 
-    # Load cached credentials from disk if they exist.
-    # Note that these are local to the Python SDK and not reused by other SDKs.
+    # Load cached credentials from disk if they exist. Note that these are
+    # local to the Python SDK and not reused by other SDKs.
     oidc_endpoints = cfg.oidc_endpoints
     redirect_url = 'http://localhost:8020'
     token_cache = TokenCache(host=cfg.host,
@@ -209,17 +210,25 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
                              redirect_url=redirect_url)
     credentials = token_cache.load()
     if credentials:
-        # Force a refresh in case the loaded credentials are expired.
-        credentials.token()
-    else:
-        oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
-                                   client_id=client_id,
-                                   redirect_url=redirect_url,
-                                   client_secret=client_secret)
-        consent = oauth_client.initiate_consent()
-        if not consent:
-            return None
-        credentials = consent.launch_external_browser()
+        try:
+            # Pro-actively refresh the loaded credentials. This is done
+            # to detect if the token is expired and needs to be refreshed
+            # by going through the OAuth login flow.
+            credentials.token()
+            return credentials(cfg)
+        # TODO: We should ideally use more specific exceptions.
+        except Exception as e:
+            logger.warning(f'Failed to refresh cached token: {e}. Initiating new OAuth login flow')
+
+    oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
+                               client_id=client_id,
+                               redirect_url=redirect_url,
+                               client_secret=client_secret)
+    consent = oauth_client.initiate_consent()
+    if not consent:
+        return None
+
+    credentials = consent.launch_external_browser()
     token_cache.save(credentials)
     return credentials(cfg)
@@ -667,12 +676,18 @@ class MetadataServiceTokenSource(Refreshable):
         self.host = cfg.host
 
     def refresh(self) -> Token:
-        resp = requests.get(self.url,
-                            timeout=self._metadata_service_timeout,
-                            headers={
-                                self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION,
-                                self.METADATA_SERVICE_HOST_HEADER: self.host
-                            })
+        resp = requests.get(
+            self.url,
+            timeout=self._metadata_service_timeout,
+            headers={
+                self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION,
+                self.METADATA_SERVICE_HOST_HEADER: self.host
+            },
+            proxies={
+                # Explicitly exclude localhost from being proxied. This is necessary
+                # for Metadata URLs which typically point to localhost.
+                "no_proxy": "localhost,127.0.0.1"
+            })
         json_resp: dict[str, Union[str, float]] = resp.json()
         access_token = json_resp.get("access_token", None)
         if access_token is None:
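
requests honors a special 'no_proxy' key in the proxies mapping, overriding any proxy environment variables for matching hosts. Since metadata-service URLs typically point at localhost, this keeps token refreshes from being routed through a corporate proxy. A sketch, with a hypothetical local endpoint:

    import requests

    # Even with HTTP_PROXY/HTTPS_PROXY exported, this request goes straight
    # to the local server rather than through the configured proxy.
    resp = requests.get("http://localhost:8080/token",  # hypothetical endpoint
                        proxies={"no_proxy": "localhost,127.0.0.1"})
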
databricks/sdk/data_plane.py CHANGED
@@ -3,7 +3,6 @@ from dataclasses import dataclass
 from typing import Callable, List
 
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo
 
 
 @dataclass
@@ -19,6 +18,7 @@ class DataPlaneDetails:
 
 class DataPlaneService:
     """Helper class to fetch and manage DataPlane details."""
+    from .service.serving import DataPlaneInfo
 
     def __init__(self):
         self._data_plane_info = {}
databricks/sdk/mixins/files.py CHANGED
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import base64
+import logging
 import os
 import pathlib
 import platform
@@ -8,19 +9,27 @@ import shutil
 import sys
 from abc import ABC, abstractmethod
 from collections import deque
+from collections.abc import Iterator
 from io import BytesIO
 from types import TracebackType
 from typing import (TYPE_CHECKING, AnyStr, BinaryIO, Generator, Iterable,
-                    Iterator, Type, Union)
+                    Optional, Type, Union)
 from urllib import parse
 
+from requests import RequestException
+
+from .._base_client import _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..errors import NotFound
 from ..service import files
+from ..service._internal import _escape_multi_segment_path_parameter
+from ..service.files import DownloadResponse
 
 if TYPE_CHECKING:
     from _typeshed import Self
 
+_LOG = logging.getLogger(__name__)
+
 
 class _DbfsIO(BinaryIO):
     MAX_CHUNK_SIZE = 1024 * 1024
@@ -636,3 +645,177 @@ class DbfsExt(files.DbfsAPI):
         if p.is_dir and not recursive:
             raise IOError('deleting directories requires recursive flag')
         p.delete(recursive=recursive)
+
+
+class FilesExt(files.FilesAPI):
+    __doc__ = files.FilesAPI.__doc__
+
+    def __init__(self, api_client, config: Config):
+        super().__init__(api_client)
+        self._config = config.copy()
+
+    def download(self, file_path: str) -> DownloadResponse:
+        """Download a file.
+
+        Downloads a file of any size. The file contents are the response body.
+        This is a standard HTTP file download, not a JSON RPC.
+
+        It is strongly recommended, for fault tolerance reasons,
+        to iteratively consume from the stream with a maximum read(size)
+        defined instead of using indefinite-size reads.
+
+        :param file_path: str
+          The remote path of the file, e.g. /Volumes/path/to/your/file
+
+        :returns: :class:`DownloadResponse`
+        """
+
+        initial_response: DownloadResponse = self._download_raw_stream(file_path=file_path,
+                                                                       start_byte_offset=0,
+                                                                       if_unmodified_since_timestamp=None)
+
+        wrapped_response = self._wrap_stream(file_path, initial_response)
+        initial_response.contents._response = wrapped_response
+        return initial_response
+
+    def _download_raw_stream(self,
+                             file_path: str,
+                             start_byte_offset: int,
+                             if_unmodified_since_timestamp: Optional[str] = None) -> DownloadResponse:
+        headers = {'Accept': 'application/octet-stream', }
+
+        if start_byte_offset and not if_unmodified_since_timestamp:
+            raise Exception("if_unmodified_since_timestamp is required if start_byte_offset is specified")
+
+        if start_byte_offset:
+            headers['Range'] = f'bytes={start_byte_offset}-'
+
+        if if_unmodified_since_timestamp:
+            headers['If-Unmodified-Since'] = if_unmodified_since_timestamp
+
+        response_headers = ['content-length', 'content-type', 'last-modified', ]
+        res = self._api.do('GET',
+                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
+                           headers=headers,
+                           response_headers=response_headers,
+                           raw=True)
+
+        result = DownloadResponse.from_dict(res)
+        if not isinstance(result.contents, _StreamingResponse):
+            raise Exception("Internal error: response contents is of unexpected type: " +
+                            type(result.contents).__name__)
+
+        return result
+
+    def _wrap_stream(self, file_path: str, downloadResponse: DownloadResponse):
+        underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+        return _ResilientResponse(self,
+                                  file_path,
+                                  downloadResponse.last_modified,
+                                  offset=0,
+                                  underlying_response=underlying_response)
+
+
+class _ResilientResponse(_RawResponse):
+
+    def __init__(self, api: FilesExt, file_path: str, file_last_modified: str, offset: int,
+                 underlying_response: _RawResponse):
+        self.api = api
+        self.file_path = file_path
+        self.underlying_response = underlying_response
+        self.offset = offset
+        self.file_last_modified = file_last_modified
+
+    def iter_content(self, chunk_size=1, decode_unicode=False):
+        if decode_unicode:
+            raise ValueError('Decode unicode is not supported')
+
+        iterator = self.underlying_response.iter_content(chunk_size=chunk_size, decode_unicode=False)
+        self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified, self.offset,
+                                           self.api, chunk_size)
+        return self.iterator
+
+    def close(self):
+        self.iterator.close()
+
+
+class _ResilientIterator(Iterator):
+    # This class tracks current offset (returned to the client code)
+    # and recovers from failures by requesting download from the current offset.
+
+    @staticmethod
+    def _extract_raw_response(download_response: DownloadResponse) -> _RawResponse:
+        streaming_response: _StreamingResponse = download_response.contents  # this is an instance of _StreamingResponse
+        return streaming_response._response
+
+    def __init__(self, underlying_iterator, file_path: str, file_last_modified: str, offset: int,
+                 api: FilesExt, chunk_size: int):
+        self._underlying_iterator = underlying_iterator
+        self._api = api
+        self._file_path = file_path
+
+        # Absolute current offset (0-based), i.e. number of bytes from the beginning of the file
+        # that were so far returned to the caller code.
+        self._offset = offset
+        self._file_last_modified = file_last_modified
+        self._chunk_size = chunk_size
+
+        self._total_recovers_count: int = 0
+        self._recovers_without_progressing_count: int = 0
+        self._closed: bool = False
+
+    def _should_recover(self) -> bool:
+        if self._total_recovers_count == self._api._config.files_api_client_download_max_total_recovers:
+            _LOG.debug("Total recovers limit exceeded")
+            return False
+        if self._api._config.files_api_client_download_max_total_recovers_without_progressing is not None and self._recovers_without_progressing_count >= self._api._config.files_api_client_download_max_total_recovers_without_progressing:
+            _LOG.debug("No progression recovers limit exceeded")
+            return False
+        return True
+
+    def _recover(self) -> bool:
+        if not self._should_recover():
+            return False  # recover suppressed, rethrow original exception
+
+        self._total_recovers_count += 1
+        self._recovers_without_progressing_count += 1
+
+        try:
+            self._underlying_iterator.close()
+
+            _LOG.debug("Trying to recover from offset " + str(self._offset))
+
+            # following call includes all the required network retries
+            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset,
+                                                              self._file_last_modified)
+            underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+            self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size,
+                                                                         decode_unicode=False)
+            _LOG.debug("Recover succeeded")
+            return True
+        except:
+            return False  # recover failed, rethrow original exception
+
+    def __next__(self):
+        if self._closed:
+            # following _BaseClient
+            raise ValueError("I/O operation on closed file")
+
+        while True:
+            try:
+                returned_bytes = next(self._underlying_iterator)
+                self._offset += len(returned_bytes)
+                self._recovers_without_progressing_count = 0
+                return returned_bytes
+
+            except StopIteration:
+                raise
+
+            # https://requests.readthedocs.io/en/latest/user/quickstart/#errors-and-exceptions
+            except RequestException:
+                if not self._recover():
+                    raise
+
+    def close(self):
+        self._underlying_iterator.close()
+        self._closed = True
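
Taken together: download() returns a DownloadResponse whose contents stream transparently resumes after network failures. _ResilientIterator re-issues _download_raw_stream from the current byte offset using a Range header, and the If-Unmodified-Since header makes a resume fail rather than silently mix content if the remote file changed mid-download. A usage sketch, assuming the experimental client is enabled as in the config example above (paths are hypothetical):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.config import Config

    cfg = Config(enable_experimental_files_api_client=True)
    w = WorkspaceClient(config=cfg)

    resp = w.files.download('/Volumes/main/default/vol/large-file.bin')
    with open('/tmp/large-file.bin', 'wb') as out:
        while True:
            # Bounded reads, as the download() docstring recommends; recovery
            # happens inside the stream when a RequestException is raised.
            chunk = resp.contents.read(1024 * 1024)
            if not chunk:
                break
            out.write(chunk)
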
databricks/sdk/mixins/open_ai_client.py CHANGED
@@ -1,4 +1,10 @@
-from databricks.sdk.service.serving import ServingEndpointsAPI
+import json as js
+from typing import Dict, Optional
+
+from requests import Response
+
+from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
+                                            ServingEndpointsAPI)
 
 
 class ServingEndpointsExt(ServingEndpointsAPI):
@@ -50,3 +56,51 @@ class ServingEndpointsExt(ServingEndpointsAPI):
             openai_api_base=self._api._cfg.host + "/serving-endpoints",
             api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
             http_client=self._get_authorized_http_client())
+
+    def http_request(self,
+                     conn: str,
+                     method: ExternalFunctionRequestHttpMethod,
+                     path: str,
+                     *,
+                     headers: Optional[Dict[str, str]] = None,
+                     json: Optional[Dict[str, str]] = None,
+                     params: Optional[Dict[str, str]] = None) -> Response:
+        """Make external services call using the credentials stored in UC Connection.
+
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+
+        :param conn: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: Dict[str,str] (optional)
+          Additional headers for the request. If not provided, only auth headers from connections would be
+          passed.
+        :param json: Dict[str,str] (optional)
+          JSON payload for the request.
+        :param params: Dict[str,str] (optional)
+          Query parameters for the request.
+
+        :returns: :class:`Response`
+        """
+        response = Response()
+        response.status_code = 200
+        server_response = super().http_request(connection_name=conn,
+                                               method=method,
+                                               path=path,
+                                               headers=js.dumps(headers) if headers is not None else None,
+                                               json=js.dumps(json) if json is not None else None,
+                                               params=js.dumps(params) if params is not None else None)
+
+        # Read the content from the HttpRequestResponse object
+        if hasattr(server_response, "contents") and hasattr(server_response.contents, "read"):
+            raw_content = server_response.contents.read()  # Read the bytes
+        else:
+            raise ValueError("Invalid response from the server.")
+
+        # Set the raw content
+        if isinstance(raw_content, bytes):
+            response._content = raw_content
+        else:
+            raise ValueError("Contents must be bytes.")
+
+        return response
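
The wrapper JSON-encodes headers, json, and params for the generated http_request API and re-materializes the returned contents as a plain requests.Response. A call might look like this (the connection name and path are hypothetical):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

    w = WorkspaceClient()
    resp = w.serving_endpoints.http_request(
        conn="my_connection",                          # hypothetical UC connection
        method=ExternalFunctionRequestHttpMethod.GET,
        path="/api/v1/status",                         # hypothetical remote path
        params={"verbose": "true"})
    print(resp.status_code, resp.text)
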
databricks/sdk/retries.py CHANGED
@@ -13,7 +13,8 @@ def retried(*,
             on: Sequence[Type[BaseException]] = None,
             is_retryable: Callable[[BaseException], Optional[str]] = None,
             timeout=timedelta(minutes=20),
-            clock: Clock = None):
+            clock: Clock = None,
+            before_retry: Callable = None):
    has_allowlist = on is not None
    has_callback = is_retryable is not None
    if not (has_allowlist or has_callback) or (has_allowlist and has_callback):
@@ -54,6 +55,9 @@ def retried(*,
                        raise err
 
                    logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
+                   if before_retry:
+                       before_retry()
+
                    clock.sleep(sleep + random())
                    attempt += 1
            raise TimeoutError(f'Timed out after {timeout}') from last_err
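
The new before_retry hook runs after an attempt is deemed retryable and just before the backoff sleep; _BaseClient uses it to rewind seekable request bodies (see _base_client.py above). A standalone sketch against the decorator's public signature:

    from datetime import timedelta
    from databricks.sdk.retries import retried

    attempts = {'n': 0}

    @retried(on=[ConnectionError],
             timeout=timedelta(seconds=30),
             before_retry=lambda: print("about to retry"))
    def flaky() -> str:
        attempts['n'] += 1
        if attempts['n'] < 3:
            raise ConnectionError("transient")
        return "ok"

    print(flaky())  # "about to retry" is printed before each of the two retries
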
databricks/sdk/service/apps.py CHANGED
@@ -967,25 +967,33 @@ class AppsAPI:
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def create(self, *, app: Optional[App] = None) -> Wait[App]:
+    def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
         """Create an app.
 
         Creates a new app.
 
         :param app: :class:`App` (optional)
+        :param no_compute: bool (optional)
+          If true, the app will not be started after creation.
 
         :returns:
           Long-running operation waiter for :class:`App`.
          See :method:wait_get_app_active for more details.
         """
         body = app.as_dict()
+        query = {}
+        if no_compute is not None: query['no_compute'] = no_compute
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.0/apps', query=query, body=body, headers=headers)
         return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
 
-    def create_and_wait(self, *, app: Optional[App] = None, timeout=timedelta(minutes=20)) -> App:
-        return self.create(app=app).result(timeout=timeout)
+    def create_and_wait(self,
+                        *,
+                        app: Optional[App] = None,
+                        no_compute: Optional[bool] = None,
+                        timeout=timedelta(minutes=20)) -> App:
+        return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
         """Delete an app.