databricks-sdk 0.27.1__py3-none-any.whl → 0.28.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks/sdk/__init__.py +9 -9
- databricks/sdk/azure.py +0 -27
- databricks/sdk/config.py +6 -9
- databricks/sdk/core.py +5 -0
- databricks/sdk/environments.py +34 -1
- databricks/sdk/errors/__init__.py +1 -0
- databricks/sdk/errors/mapper.py +4 -0
- databricks/sdk/errors/private_link.py +60 -0
- databricks/sdk/service/catalog.py +666 -628
- databricks/sdk/service/compute.py +72 -105
- databricks/sdk/service/jobs.py +1 -12
- databricks/sdk/service/marketplace.py +9 -31
- databricks/sdk/service/pipelines.py +118 -3
- databricks/sdk/service/serving.py +78 -10
- databricks/sdk/service/sharing.py +37 -2
- databricks/sdk/service/sql.py +0 -1
- databricks/sdk/service/vectorsearch.py +188 -1
- databricks/sdk/service/workspace.py +8 -4
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/RECORD +25 -24
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/marketplace.py

@@ -1908,34 +1908,6 @@ class SortBy(Enum):
     SORT_BY_UNSPECIFIED = 'SORT_BY_UNSPECIFIED'
 
 
-@dataclass
-class SortBySpec:
-    sort_by: SortBy
-    """The field on which to sort the listing."""
-
-    sort_order: SortOrder
-    """The order in which to sort the listing."""
-
-    def as_dict(self) -> dict:
-        """Serializes the SortBySpec into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.sort_by is not None: body['sort_by'] = self.sort_by.value
-        if self.sort_order is not None: body['sort_order'] = self.sort_order.value
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> SortBySpec:
-        """Deserializes the SortBySpec from a dictionary."""
-        return cls(sort_by=_enum(d, 'sort_by', SortBy), sort_order=_enum(d, 'sort_order', SortOrder))
-
-
-class SortOrder(Enum):
-
-    SORT_ORDER_ASCENDING = 'SORT_ORDER_ASCENDING'
-    SORT_ORDER_DESCENDING = 'SORT_ORDER_DESCENDING'
-    SORT_ORDER_UNSPECIFIED = 'SORT_ORDER_UNSPECIFIED'
-
-
 @dataclass
 class TokenDetail:
     bearer_token: Optional[str] = None
@@ -2579,13 +2551,14 @@ class ConsumerListingsAPI:
              *,
              assets: Optional[List[AssetType]] = None,
              categories: Optional[List[Category]] = None,
+             is_ascending: Optional[bool] = None,
              is_free: Optional[bool] = None,
              is_private_exchange: Optional[bool] = None,
              is_staff_pick: Optional[bool] = None,
              page_size: Optional[int] = None,
              page_token: Optional[str] = None,
              provider_ids: Optional[List[str]] = None,
-
+             sort_by: Optional[SortBy] = None,
              tags: Optional[List[ListingTag]] = None) -> Iterator[Listing]:
         """List listings.
 
@@ -2595,6 +2568,7 @@ class ConsumerListingsAPI:
           Matches any of the following asset types
         :param categories: List[:class:`Category`] (optional)
           Matches any of the following categories
+        :param is_ascending: bool (optional)
         :param is_free: bool (optional)
           Filters each listing based on if it is free.
         :param is_private_exchange: bool (optional)
@@ -2605,7 +2579,7 @@ class ConsumerListingsAPI:
         :param page_token: str (optional)
         :param provider_ids: List[str] (optional)
           Matches any of the following provider ids
-        :param
+        :param sort_by: :class:`SortBy` (optional)
           Criteria for sorting the resulting set of listings.
         :param tags: List[:class:`ListingTag`] (optional)
           Matches any of the following tags
@@ -2616,13 +2590,14 @@ class ConsumerListingsAPI:
         query = {}
         if assets is not None: query['assets'] = [v.value for v in assets]
         if categories is not None: query['categories'] = [v.value for v in categories]
+        if is_ascending is not None: query['is_ascending'] = is_ascending
         if is_free is not None: query['is_free'] = is_free
         if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange
         if is_staff_pick is not None: query['is_staff_pick'] = is_staff_pick
         if page_size is not None: query['page_size'] = page_size
         if page_token is not None: query['page_token'] = page_token
         if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids]
-        if
+        if sort_by is not None: query['sort_by'] = sort_by.value
         if tags is not None: query['tags'] = [v.as_dict() for v in tags]
         headers = {'Accept': 'application/json', }
 
@@ -2640,6 +2615,7 @@ class ConsumerListingsAPI:
               *,
               assets: Optional[List[AssetType]] = None,
               categories: Optional[List[Category]] = None,
+              is_ascending: Optional[bool] = None,
              is_free: Optional[bool] = None,
              is_private_exchange: Optional[bool] = None,
              page_size: Optional[int] = None,
@@ -2657,6 +2633,7 @@ class ConsumerListingsAPI:
           Matches any of the following asset types
         :param categories: List[:class:`Category`] (optional)
           Matches any of the following categories
+        :param is_ascending: bool (optional)
         :param is_free: bool (optional)
         :param is_private_exchange: bool (optional)
         :param page_size: int (optional)
@@ -2671,6 +2648,7 @@ class ConsumerListingsAPI:
         query = {}
         if assets is not None: query['assets'] = [v.value for v in assets]
         if categories is not None: query['categories'] = [v.value for v in categories]
+        if is_ascending is not None: query['is_ascending'] = is_ascending
         if is_free is not None: query['is_free'] = is_free
         if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange
         if page_size is not None: query['page_size'] = page_size
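The practical effect of the marketplace changes: the nested `SortBySpec`/`SortOrder` pair is removed, and `ConsumerListingsAPI.list()` (and `search()`) now take a flat `SortBy` value plus an `is_ascending` flag. A minimal sketch of the new call, assuming an authenticated `WorkspaceClient` and assuming the `SortBy` enum has a `SORT_BY_DATE` member (only `SORT_BY_UNSPECIFIED` is visible in the hunk above):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import SortBy

w = WorkspaceClient()  # picks up credentials from the environment or ~/.databrickscfg

# 0.28.0 replaces the removed SortBySpec/SortOrder classes with a flat
# sort_by enum plus an is_ascending boolean query parameter.
for listing in w.consumer_listings.list(sort_by=SortBy.SORT_BY_DATE,  # member assumed, not shown in the diff
                                        is_ascending=False,
                                        page_size=25):
    print(listing.id)
```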
databricks/sdk/service/pipelines.py

@@ -57,6 +57,9 @@ class CreatePipeline:
     filters: Optional[Filters] = None
     """Filters on which Pipeline packages to include in the deployed graph."""
 
+    gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
+    """The definition of a gateway pipeline to support CDC."""
+
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
@@ -104,6 +107,7 @@ class CreatePipeline:
         if self.dry_run is not None: body['dry_run'] = self.dry_run
         if self.edition is not None: body['edition'] = self.edition
         if self.filters: body['filters'] = self.filters.as_dict()
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
         if self.id is not None: body['id'] = self.id
         if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
@@ -130,6 +134,7 @@ class CreatePipeline:
                    dry_run=d.get('dry_run', None),
                    edition=d.get('edition', None),
                    filters=_from_dict(d, 'filters', Filters),
+                   gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
                    id=d.get('id', None),
                    ingestion_definition=_from_dict(d, 'ingestion_definition',
                                                    ManagedIngestionPipelineDefinition),
@@ -266,6 +271,9 @@ class EditPipeline:
     filters: Optional[Filters] = None
     """Filters on which Pipeline packages to include in the deployed graph."""
 
+    gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
+    """The definition of a gateway pipeline to support CDC."""
+
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
@@ -317,6 +325,7 @@ class EditPipeline:
         if self.expected_last_modified is not None:
             body['expected_last_modified'] = self.expected_last_modified
         if self.filters: body['filters'] = self.filters.as_dict()
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
         if self.id is not None: body['id'] = self.id
         if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
@@ -344,6 +353,7 @@ class EditPipeline:
                    edition=d.get('edition', None),
                    expected_last_modified=d.get('expected_last_modified', None),
                    filters=_from_dict(d, 'filters', Filters),
+                   gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
                    id=d.get('id', None),
                    ingestion_definition=_from_dict(d, 'ingestion_definition',
                                                    ManagedIngestionPipelineDefinition),
@@ -570,6 +580,43 @@ class IngestionConfig:
         return cls(schema=_from_dict(d, 'schema', SchemaSpec), table=_from_dict(d, 'table', TableSpec))
 
 
+@dataclass
+class IngestionGatewayPipelineDefinition:
+    connection_id: Optional[str] = None
+    """Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the
+    source."""
+
+    gateway_storage_catalog: Optional[str] = None
+    """Required, Immutable. The name of the catalog for the gateway pipeline's storage location."""
+
+    gateway_storage_name: Optional[str] = None
+    """Required. The Unity Catalog-compatible naming for the gateway storage location. This is the
+    destination to use for the data that is extracted by the gateway. Delta Live Tables system will
+    automatically create the storage location under the catalog and schema."""
+
+    gateway_storage_schema: Optional[str] = None
+    """Required, Immutable. The name of the schema for the gateway pipelines's storage location."""
+
+    def as_dict(self) -> dict:
+        """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.gateway_storage_catalog is not None:
+            body['gateway_storage_catalog'] = self.gateway_storage_catalog
+        if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
+        if self.gateway_storage_schema is not None:
+            body['gateway_storage_schema'] = self.gateway_storage_schema
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
+        """Deserializes the IngestionGatewayPipelineDefinition from a dictionary."""
+        return cls(connection_id=d.get('connection_id', None),
+                   gateway_storage_catalog=d.get('gateway_storage_catalog', None),
+                   gateway_storage_name=d.get('gateway_storage_name', None),
+                   gateway_storage_schema=d.get('gateway_storage_schema', None))
+
+
 @dataclass
 class ListPipelineEventsResponse:
     events: Optional[List[PipelineEvent]] = None
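As a quick illustration of the new dataclass, here is a hedged sketch that builds an `IngestionGatewayPipelineDefinition` and round-trips it through the generated serializers; the connection ID, catalog, schema, and storage names are placeholders:

```python
from databricks.sdk.service.pipelines import IngestionGatewayPipelineDefinition

gateway = IngestionGatewayPipelineDefinition(
    connection_id='<uc-connection-id>',        # Unity Catalog connection to the CDC source
    gateway_storage_catalog='main',            # catalog holding the gateway's staging data
    gateway_storage_schema='cdc_staging',      # schema holding the gateway's staging data
    gateway_storage_name='sqlserver_gateway')  # UC-compatible name for the storage location

# as_dict()/from_dict() are symmetric, matching the generated code above.
assert IngestionGatewayPipelineDefinition.from_dict(gateway.as_dict()) == gateway
```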
@@ -659,12 +706,17 @@ class ManagedIngestionPipelineDefinition:
     objects: Optional[List[IngestionConfig]] = None
     """Required. Settings specifying tables to replicate and the destination for the replicated tables."""
 
+    table_configuration: Optional[TableSpecificConfig] = None
+    """Configuration settings to control the ingestion of tables. These settings are applied to all
+    tables in the pipeline."""
+
     def as_dict(self) -> dict:
         """Serializes the ManagedIngestionPipelineDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.connection_name is not None: body['connection_name'] = self.connection_name
         if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
         if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
     @classmethod
@@ -672,7 +724,8 @@ class ManagedIngestionPipelineDefinition:
         """Deserializes the ManagedIngestionPipelineDefinition from a dictionary."""
         return cls(connection_name=d.get('connection_name', None),
                    ingestion_gateway_id=d.get('ingestion_gateway_id', None),
-                   objects=_repeated_dict(d, 'objects', IngestionConfig)
+                   objects=_repeated_dict(d, 'objects', IngestionConfig),
+                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
 
 
 @dataclass
@@ -1344,6 +1397,9 @@ class PipelineSpec:
     filters: Optional[Filters] = None
     """Filters on which Pipeline packages to include in the deployed graph."""
 
+    gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
+    """The definition of a gateway pipeline to support CDC."""
+
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
@@ -1389,6 +1445,7 @@ class PipelineSpec:
         if self.development is not None: body['development'] = self.development
         if self.edition is not None: body['edition'] = self.edition
         if self.filters: body['filters'] = self.filters.as_dict()
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
         if self.id is not None: body['id'] = self.id
         if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
@@ -1413,6 +1470,7 @@ class PipelineSpec:
                    development=d.get('development', None),
                    edition=d.get('edition', None),
                    filters=_from_dict(d, 'filters', Filters),
+                   gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
                    id=d.get('id', None),
                    ingestion_definition=_from_dict(d, 'ingestion_definition',
                                                    ManagedIngestionPipelineDefinition),
@@ -1523,6 +1581,11 @@ class SchemaSpec:
     source_schema: Optional[str] = None
     """Required. Schema name in the source database."""
 
+    table_configuration: Optional[TableSpecificConfig] = None
+    """Configuration settings to control the ingestion of tables. These settings are applied to all
+    tables in this schema and override the table_configuration defined in the
+    ManagedIngestionPipelineDefinition object."""
+
     def as_dict(self) -> dict:
         """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1530,6 +1593,7 @@ class SchemaSpec:
         if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
         if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
         if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
     @classmethod
@@ -1538,7 +1602,8 @@ class SchemaSpec:
         return cls(destination_catalog=d.get('destination_catalog', None),
                    destination_schema=d.get('destination_schema', None),
                    source_catalog=d.get('source_catalog', None),
-                   source_schema=d.get('source_schema', None)
+                   source_schema=d.get('source_schema', None),
+                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
 
 
 @dataclass
@@ -1729,6 +1794,10 @@ class TableSpec:
     source_table: Optional[str] = None
     """Required. Table name in the source database."""
 
+    table_configuration: Optional[TableSpecificConfig] = None
+    """Configuration settings to control the ingestion of tables. These settings override the
+    table_configuration defined in the ManagedIngestionPipelineDefinition object and the SchemaSpec."""
+
     def as_dict(self) -> dict:
         """Serializes the TableSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1738,6 +1807,7 @@ class TableSpec:
         if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
         if self.source_schema is not None: body['source_schema'] = self.source_schema
         if self.source_table is not None: body['source_table'] = self.source_table
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
     @classmethod
@@ -1748,7 +1818,44 @@ class TableSpec:
                    destination_table=d.get('destination_table', None),
                    source_catalog=d.get('source_catalog', None),
                    source_schema=d.get('source_schema', None),
-                   source_table=d.get('source_table', None)
+                   source_table=d.get('source_table', None),
+                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+
+@dataclass
+class TableSpecificConfig:
+    primary_keys: Optional[List[str]] = None
+    """The primary key of the table used to apply changes."""
+
+    salesforce_include_formula_fields: Optional[bool] = None
+    """If true, formula fields defined in the table are included in the ingestion. This setting is only
+    valid for the Salesforce connector"""
+
+    scd_type: Optional[TableSpecificConfigScdType] = None
+    """The SCD type to use to ingest the table."""
+
+    def as_dict(self) -> dict:
+        """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.primary_keys: body['primary_keys'] = [v for v in self.primary_keys]
+        if self.salesforce_include_formula_fields is not None:
+            body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
+        if self.scd_type is not None: body['scd_type'] = self.scd_type.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TableSpecificConfig:
+        """Deserializes the TableSpecificConfig from a dictionary."""
+        return cls(primary_keys=d.get('primary_keys', None),
+                   salesforce_include_formula_fields=d.get('salesforce_include_formula_fields', None),
+                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType))
+
+
+class TableSpecificConfigScdType(Enum):
+    """The SCD type to use to ingest the table."""
+
+    SCD_TYPE_1 = 'SCD_TYPE_1'
+    SCD_TYPE_2 = 'SCD_TYPE_2'
 
 
 @dataclass
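Taken together, these pipeline changes let `table_configuration` be set at three levels: on `ManagedIngestionPipelineDefinition` for the whole pipeline, on `SchemaSpec` for one schema, and on `TableSpec` for one table, with the narrower scope overriding the wider one per the docstrings above. A hedged sketch with placeholder catalog, schema, and table names:

```python
from databricks.sdk.service.pipelines import (IngestionConfig, SchemaSpec, TableSpec,
                                              TableSpecificConfig, TableSpecificConfigScdType)

# Schema-level setting: every table replicated from `sales` uses SCD type 2.
sales_schema = IngestionConfig(schema=SchemaSpec(
    source_catalog='sqlserver', source_schema='sales',
    destination_catalog='main', destination_schema='sales_bronze',
    table_configuration=TableSpecificConfig(scd_type=TableSpecificConfigScdType.SCD_TYPE_2)))

# Table-level override: one Salesforce table with explicit primary keys and formula fields.
account_table = IngestionConfig(table=TableSpec(
    source_schema='crm', source_table='Account',
    destination_catalog='main', destination_schema='crm_bronze',
    table_configuration=TableSpecificConfig(primary_keys=['Id'],
                                            salesforce_include_formula_fields=True)))

objects = [sales_schema, account_table]  # feeds ManagedIngestionPipelineDefinition(objects=...)
```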
@@ -1981,6 +2088,7 @@ class PipelinesAPI:
               dry_run: Optional[bool] = None,
               edition: Optional[str] = None,
               filters: Optional[Filters] = None,
+              gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
              id: Optional[str] = None,
              ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None,
              libraries: Optional[List[PipelineLibrary]] = None,
@@ -2019,6 +2127,8 @@ class PipelinesAPI:
           Pipeline product edition.
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
+        :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
+          The definition of a gateway pipeline to support CDC.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional)
@@ -2056,6 +2166,7 @@ class PipelinesAPI:
         if dry_run is not None: body['dry_run'] = dry_run
         if edition is not None: body['edition'] = edition
         if filters is not None: body['filters'] = filters.as_dict()
+        if gateway_definition is not None: body['gateway_definition'] = gateway_definition.as_dict()
         if id is not None: body['id'] = id
         if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict()
         if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
@@ -2385,6 +2496,7 @@ class PipelinesAPI:
               edition: Optional[str] = None,
               expected_last_modified: Optional[int] = None,
               filters: Optional[Filters] = None,
+              gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
              id: Optional[str] = None,
              ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None,
              libraries: Optional[List[PipelineLibrary]] = None,
@@ -2426,6 +2538,8 @@ class PipelinesAPI:
           modified after that time, then the request will fail with a conflict.
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
+        :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
+          The definition of a gateway pipeline to support CDC.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional)
@@ -2463,6 +2577,7 @@ class PipelinesAPI:
         if edition is not None: body['edition'] = edition
         if expected_last_modified is not None: body['expected_last_modified'] = expected_last_modified
         if filters is not None: body['filters'] = filters.as_dict()
+        if gateway_definition is not None: body['gateway_definition'] = gateway_definition.as_dict()
         if id is not None: body['id'] = id
         if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict()
         if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
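At the API level, the new `gateway_definition` parameter flows straight through `PipelinesAPI.create()` (and `update()`) into the request body. A hedged sketch of creating a gateway pipeline; the pipeline name, connection ID, and storage locations are placeholders, and any additional fields your workspace requires are omitted:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import IngestionGatewayPipelineDefinition

w = WorkspaceClient()

created = w.pipelines.create(
    name='sqlserver-cdc-gateway',
    gateway_definition=IngestionGatewayPipelineDefinition(
        connection_id='<uc-connection-id>',
        gateway_storage_catalog='main',
        gateway_storage_schema='cdc_staging',
        gateway_storage_name='sqlserver_gateway'))
print(created.pipeline_id)
```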
databricks/sdk/service/serving.py

@@ -8,7 +8,7 @@ import time
 from dataclasses import dataclass
 from datetime import timedelta
 from enum import Enum
-from typing import Any, Callable, Dict, Iterator, List, Optional
+from typing import Any, BinaryIO, Callable, Dict, Iterator, List, Optional
 
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
@@ -170,6 +170,9 @@ class AppDeployment:
     creator: Optional[str] = None
     """The email of the user creates the deployment."""
 
+    deployment_artifacts: Optional[AppDeploymentArtifacts] = None
+    """The deployment artifacts for an app."""
+
     deployment_id: Optional[str] = None
     """The unique id of the deployment."""
 
@@ -184,6 +187,7 @@ class AppDeployment:
         body = {}
         if self.create_time is not None: body['create_time'] = self.create_time
         if self.creator is not None: body['creator'] = self.creator
+        if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict()
         if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
         if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
         if self.status: body['status'] = self.status.as_dict()
@@ -195,18 +199,37 @@ class AppDeployment:
         """Deserializes the AppDeployment from a dictionary."""
         return cls(create_time=d.get('create_time', None),
                    creator=d.get('creator', None),
+                   deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts),
                    deployment_id=d.get('deployment_id', None),
                    source_code_path=d.get('source_code_path', None),
                    status=_from_dict(d, 'status', AppDeploymentStatus),
                    update_time=d.get('update_time', None))
 
 
+@dataclass
+class AppDeploymentArtifacts:
+    source_code_path: Optional[str] = None
+    """The source code of the deployment."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts:
+        """Deserializes the AppDeploymentArtifacts from a dictionary."""
+        return cls(source_code_path=d.get('source_code_path', None))
+
+
 class AppDeploymentState(Enum):
 
     CANCELLED = 'CANCELLED'
     FAILED = 'FAILED'
     IN_PROGRESS = 'IN_PROGRESS'
     STATE_UNSPECIFIED = 'STATE_UNSPECIFIED'
+    STOPPED = 'STOPPED'
     SUCCEEDED = 'SUCCEEDED'
 
 
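Two small app-related additions ride along here: `AppDeployment` gains a `deployment_artifacts` block and `AppDeploymentState` gains a `STOPPED` member. A minimal sketch, assuming the app classes still live in `databricks.sdk.service.serving` as the hunks above suggest (the IDs and workspace paths are placeholders):

```python
from databricks.sdk.service.serving import AppDeployment, AppDeploymentState

deployment = AppDeployment.from_dict({
    'deployment_id': 'deployment-123',
    'source_code_path': '/Workspace/Users/someone@example.com/my_app',
    'deployment_artifacts': {'source_code_path': '/Workspace/Users/someone@example.com/my_app/.artifacts'},
})
print(deployment.deployment_artifacts.source_code_path)

assert AppDeploymentState.STOPPED.value == 'STOPPED'  # new state in 0.28.0
```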
@@ -491,6 +514,9 @@ class CreateServingEndpoint:
     """Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model
     endpoints are supported as of now."""
 
+    route_optimized: Optional[bool] = None
+    """Enable route optimization for the serving endpoint."""
+
     tags: Optional[List[EndpointTag]] = None
     """Tags to be attached to the serving endpoint and automatically propagated to billing logs."""
 
@@ -500,6 +526,7 @@ class CreateServingEndpoint:
         if self.config: body['config'] = self.config.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
@@ -509,6 +536,7 @@ class CreateServingEndpoint:
         return cls(config=_from_dict(d, 'config', EndpointCoreConfigInput),
                    name=d.get('name', None),
                    rate_limits=_repeated_dict(d, 'rate_limits', RateLimit),
+                   route_optimized=d.get('route_optimized', None),
                    tags=_repeated_dict(d, 'tags', EndpointTag))
 
 
@@ -844,16 +872,18 @@ class EnvVariable:
 
 @dataclass
 class ExportMetricsResponse:
+    contents: Optional[BinaryIO] = None
 
     def as_dict(self) -> dict:
         """Serializes the ExportMetricsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.contents: body['contents'] = self.contents
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse:
         """Deserializes the ExportMetricsResponse from a dictionary."""
-        return cls()
+        return cls(contents=d.get('contents', None))
 
 
 @dataclass
@@ -1090,14 +1120,24 @@ class ListEndpointsResponse:
 
 @dataclass
 class OpenAiConfig:
-
-    """
+    microsoft_entra_client_id: Optional[str] = None
+    """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID."""
+
+    microsoft_entra_client_secret: Optional[str] = None
+    """The Databricks secret key reference for the Microsoft Entra Client Secret that is only required
+    for Azure AD OpenAI."""
+
+    microsoft_entra_tenant_id: Optional[str] = None
+    """This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID."""
 
     openai_api_base: Optional[str] = None
     """This is the base URL for the OpenAI API (default: "https://api.openai.com/v1"). For Azure
     OpenAI, this field is required, and is the base URL for the Azure OpenAI API service provided by
     Azure."""
 
+    openai_api_key: Optional[str] = None
+    """The Databricks secret key reference for an OpenAI or Azure OpenAI API key."""
+
     openai_api_type: Optional[str] = None
     """This is an optional field to specify the type of OpenAI API to use. For Azure OpenAI, this field
     is required, and adjust this parameter to represent the preferred security access validation
@@ -1118,6 +1158,12 @@ class OpenAiConfig:
     def as_dict(self) -> dict:
         """Serializes the OpenAiConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.microsoft_entra_client_id is not None:
+            body['microsoft_entra_client_id'] = self.microsoft_entra_client_id
+        if self.microsoft_entra_client_secret is not None:
+            body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret
+        if self.microsoft_entra_tenant_id is not None:
+            body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id
         if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base
         if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key
         if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type
@@ -1130,7 +1176,10 @@ class OpenAiConfig:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig:
         """Deserializes the OpenAiConfig from a dictionary."""
-        return cls(
+        return cls(microsoft_entra_client_id=d.get('microsoft_entra_client_id', None),
+                   microsoft_entra_client_secret=d.get('microsoft_entra_client_secret', None),
+                   microsoft_entra_tenant_id=d.get('microsoft_entra_tenant_id', None),
+                   openai_api_base=d.get('openai_api_base', None),
                    openai_api_key=d.get('openai_api_key', None),
                    openai_api_type=d.get('openai_api_type', None),
                    openai_api_version=d.get('openai_api_version', None),
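The new `OpenAiConfig` fields cover Azure OpenAI access through Microsoft Entra ID instead of a plain API key. A hedged sketch of such a configuration; the API type string, endpoint URL, and secret reference below are illustrative assumptions, not values taken from the diff:

```python
from databricks.sdk.service.serving import OpenAiConfig

azure_entra_config = OpenAiConfig(
    openai_api_type='azuread',                 # assumed value for Entra-based auth
    openai_api_base='https://<resource>.openai.azure.com/',
    microsoft_entra_tenant_id='<tenant-id>',
    microsoft_entra_client_id='<client-id>',
    # Databricks secret reference, e.g. '{{secrets/<scope>/<key>}}'
    microsoft_entra_client_secret='{{secrets/ai/entra_client_secret}}')

body = azure_entra_config.as_dict()
assert 'microsoft_entra_client_id' in body  # serialized by the new as_dict() branches
```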
@@ -2123,6 +2172,9 @@ class ServingEndpointDetailed:
     creator: Optional[str] = None
     """The email of the user who created the serving endpoint."""
 
+    endpoint_url: Optional[str] = None
+    """Endpoint invocation url if route optimization is enabled for endpoint"""
+
     id: Optional[str] = None
     """System-generated ID of the endpoint. This is used to refer to the endpoint in the Permissions
     API"""
@@ -2139,6 +2191,9 @@ class ServingEndpointDetailed:
     permission_level: Optional[ServingEndpointDetailedPermissionLevel] = None
     """The permission level of the principal making the request."""
 
+    route_optimized: Optional[bool] = None
+    """Boolean representing if route optimization has been enabled for the endpoint"""
+
     state: Optional[EndpointState] = None
     """Information corresponding to the state of the serving endpoint."""
 
@@ -2154,12 +2209,14 @@ class ServingEndpointDetailed:
         if self.config: body['config'] = self.config.as_dict()
         if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
         if self.creator is not None: body['creator'] = self.creator
+        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
         if self.id is not None: body['id'] = self.id
         if self.last_updated_timestamp is not None:
             body['last_updated_timestamp'] = self.last_updated_timestamp
         if self.name is not None: body['name'] = self.name
         if self.pending_config: body['pending_config'] = self.pending_config.as_dict()
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
         if self.state: body['state'] = self.state.as_dict()
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         if self.task is not None: body['task'] = self.task
@@ -2171,11 +2228,13 @@ class ServingEndpointDetailed:
         return cls(config=_from_dict(d, 'config', EndpointCoreConfigOutput),
                    creation_timestamp=d.get('creation_timestamp', None),
                    creator=d.get('creator', None),
+                   endpoint_url=d.get('endpoint_url', None),
                    id=d.get('id', None),
                    last_updated_timestamp=d.get('last_updated_timestamp', None),
                    name=d.get('name', None),
                    pending_config=_from_dict(d, 'pending_config', EndpointPendingConfig),
                    permission_level=_enum(d, 'permission_level', ServingEndpointDetailedPermissionLevel),
+                   route_optimized=d.get('route_optimized', None),
                    state=_from_dict(d, 'state', EndpointState),
                    tags=_repeated_dict(d, 'tags', EndpointTag),
                    task=d.get('task', None))
@@ -2760,6 +2819,7 @@ class ServingEndpointsAPI:
               config: EndpointCoreConfigInput,
               *,
               rate_limits: Optional[List[RateLimit]] = None,
+              route_optimized: Optional[bool] = None,
              tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]:
         """Create a new serving endpoint.
 
@@ -2771,6 +2831,8 @@ class ServingEndpointsAPI:
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model
           endpoints are supported as of now.
+        :param route_optimized: bool (optional)
+          Enable route optimization for the serving endpoint.
         :param tags: List[:class:`EndpointTag`] (optional)
           Tags to be attached to the serving endpoint and automatically propagated to billing logs.
 
@@ -2782,6 +2844,7 @@ class ServingEndpointsAPI:
         if config is not None: body['config'] = config.as_dict()
         if name is not None: body['name'] = name
         if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits]
+        if route_optimized is not None: body['route_optimized'] = route_optimized
         if tags is not None: body['tags'] = [v.as_dict() for v in tags]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
@@ -2796,9 +2859,13 @@ class ServingEndpointsAPI:
               config: EndpointCoreConfigInput,
               *,
              rate_limits: Optional[List[RateLimit]] = None,
+              route_optimized: Optional[bool] = None,
              tags: Optional[List[EndpointTag]] = None,
              timeout=timedelta(minutes=20)) -> ServingEndpointDetailed:
-        return self.create(config=config,
+        return self.create(config=config,
+                           name=name,
+                           rate_limits=rate_limits,
+                           route_optimized=route_optimized,
                            tags=tags).result(timeout=timeout)
 
     def delete(self, name: str):
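`route_optimized` is plumbed through `create()` and `create_and_wait()` like the other optional fields, and the resulting `ServingEndpointDetailed` exposes `endpoint_url` once route optimization is active. A hedged sketch; the endpoint name, model identifiers, and the `workload_size='Small'` string are assumptions about the rest of the serving API, not values taken from this diff:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import EndpointCoreConfigInput, ServedModelInput

w = WorkspaceClient()

endpoint = w.serving_endpoints.create_and_wait(
    name='my-route-optimized-endpoint',
    route_optimized=True,  # new in 0.28.0
    config=EndpointCoreConfigInput(served_models=[
        ServedModelInput(model_name='main.models.my_model',
                         model_version='1',
                         workload_size='Small',
                         scale_to_zero_enabled=True)
    ]))

# Populated only when route optimization is enabled for the endpoint.
print(endpoint.endpoint_url)
```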
@@ -2814,7 +2881,7 @@ class ServingEndpointsAPI:
 
         self._api.do('DELETE', f'/api/2.0/serving-endpoints/{name}', headers=headers)
 
-    def export_metrics(self, name: str):
+    def export_metrics(self, name: str) -> ExportMetricsResponse:
         """Get metrics of a serving endpoint.
 
         Retrieves the metrics associated with the provided serving endpoint in either Prometheus or
@@ -2823,12 +2890,13 @@ class ServingEndpointsAPI:
         :param name: str
           The name of the serving endpoint to retrieve metrics for. This field is required.
 
-
+        :returns: :class:`ExportMetricsResponse`
         """
 
-        headers = {}
+        headers = {'Accept': 'text/plain', }
 
-        self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/metrics', headers=headers)
+        res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/metrics', headers=headers, raw=True)
+        return ExportMetricsResponse.from_dict(res)
 
     def get(self, name: str) -> ServingEndpointDetailed:
         """Get a single serving endpoint.