msfabricpysdkcore 0.2.5__py3-none-any.whl → 0.2.7__py3-none-any.whl
This diff compares the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- msfabricpysdkcore/coreapi.py +742 -32
- msfabricpysdkcore/otheritems.py +61 -1
- msfabricpysdkcore/workspace.py +161 -9
- {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/METADATA +4 -2
- msfabricpysdkcore-0.2.7.dist-info/RECORD +30 -0
- msfabricpysdkcore/tests/__init__.py +0 -0
- msfabricpysdkcore/tests/test_admin_apis.py +0 -174
- msfabricpysdkcore/tests/test_admin_tags.py +0 -46
- msfabricpysdkcore/tests/test_connection.py +0 -111
- msfabricpysdkcore/tests/test_copy_jobs.py +0 -60
- msfabricpysdkcore/tests/test_dataflows.py +0 -60
- msfabricpysdkcore/tests/test_datapipelines.py +0 -60
- msfabricpysdkcore/tests/test_deployment_pipeline.py +0 -63
- msfabricpysdkcore/tests/test_deployment_pipelinev2.py +0 -135
- msfabricpysdkcore/tests/test_domains.py +0 -126
- msfabricpysdkcore/tests/test_environments.py +0 -114
- msfabricpysdkcore/tests/test_evenhouses.py +0 -56
- msfabricpysdkcore/tests/test_evenstreams.py +0 -52
- msfabricpysdkcore/tests/test_eventstream_topology.py +0 -82
- msfabricpysdkcore/tests/test_external_data_shares.py +0 -51
- msfabricpysdkcore/tests/test_fabric_azure_client.py +0 -80
- msfabricpysdkcore/tests/test_folders.py +0 -53
- msfabricpysdkcore/tests/test_gateways.py +0 -99
- msfabricpysdkcore/tests/test_git.py +0 -66
- msfabricpysdkcore/tests/test_graphqlapi.py +0 -44
- msfabricpysdkcore/tests/test_items.py +0 -97
- msfabricpysdkcore/tests/test_jobs.py +0 -96
- msfabricpysdkcore/tests/test_kql_dashboards.py +0 -63
- msfabricpysdkcore/tests/test_kql_queryset.py +0 -64
- msfabricpysdkcore/tests/test_kqldatabases.py +0 -56
- msfabricpysdkcore/tests/test_lakehouse.py +0 -93
- msfabricpysdkcore/tests/test_managed_private_endpoints.py +0 -61
- msfabricpysdkcore/tests/test_mirroreddatabases.py +0 -80
- msfabricpysdkcore/tests/test_ml_experiments.py +0 -47
- msfabricpysdkcore/tests/test_ml_models.py +0 -47
- msfabricpysdkcore/tests/test_mounted_adf.py +0 -64
- msfabricpysdkcore/tests/test_notebooks.py +0 -57
- msfabricpysdkcore/tests/test_one_lake_data_access_security.py +0 -63
- msfabricpysdkcore/tests/test_other_items.py +0 -45
- msfabricpysdkcore/tests/test_reflex.py +0 -57
- msfabricpysdkcore/tests/test_reports.py +0 -56
- msfabricpysdkcore/tests/test_semantic_model.py +0 -56
- msfabricpysdkcore/tests/test_shortcuts.py +0 -60
- msfabricpysdkcore/tests/test_spark.py +0 -91
- msfabricpysdkcore/tests/test_sparkjobdefinition.py +0 -55
- msfabricpysdkcore/tests/test_sqldatabases.py +0 -45
- msfabricpysdkcore/tests/test_tags.py +0 -28
- msfabricpysdkcore/tests/test_variable_libary.py +0 -61
- msfabricpysdkcore/tests/test_warehouses.py +0 -50
- msfabricpysdkcore/tests/test_workspaces_capacities.py +0 -159
- msfabricpysdkcore-0.2.5.dist-info/RECORD +0 -75
- {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/WHEEL +0 -0
- {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/licenses/LICENSE +0 -0
- {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/top_level.txt +0 -0
msfabricpysdkcore/coreapi.py
CHANGED
@@ -707,6 +707,22 @@ class FabricClientCore(FabricClient):
         response = self.calling_routine(url, operation="POST", response_codes=[200, 429], error_message="Error revoking external data share", return_format="response")
         return response.status_code
 
+    # DELETE https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/items/{itemId}/externalDataShares/{externalDataShareId}
+    def delete_external_data_share(self, workspace_id, item_id, external_data_share_id):
+        """Delete an external data share in an item
+        Args:
+            workspace_id (str): The ID of the workspace
+            item_id (str): The ID of the item
+            external_data_share_id (str): The ID of the external data share
+        Returns:
+            int: The status code of the response
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}"
+
+        response = self.calling_routine(url, operation="DELETE", response_codes=[200, 429], return_format="response",
+                                        error_message="Error deleting external data share")
+        return response.status_code
+
     # External Data Shares Recipient
 
     # POST https://api.fabric.microsoft.com/v1/externalDataShares/invitations/{invitationId}/accept
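The new `delete_external_data_share` mirrors the existing revoke call and returns the raw HTTP status code. A minimal usage sketch, assuming a `FabricClientCore` client authenticated through the SDK's usual environment-based flow (all IDs below are placeholders):

```python
from msfabricpysdkcore import FabricClientCore

# FabricClientCore picks up credentials from the environment; IDs are placeholders.
fc = FabricClientCore()

status = fc.delete_external_data_share(
    workspace_id="<workspace-id>",
    item_id="<item-id>",
    external_data_share_id="<external-data-share-id>",
)
print(status)  # the method returns the raw HTTP status code, e.g. 200
```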
@@ -1280,6 +1296,8 @@ class FabricClientCore(FabricClient):
         """
         from msfabricpysdkcore.item import Item
 
+        if item_dict["type"] == "ApacheAirflowJob":
+            return self.get_apache_airflow_job(workspace_id, item_dict["id"])
         if item_dict["type"] == "CopyJob":
             return self.get_copy_job(workspace_id, item_dict["id"])
         if item_dict["type"] == "VariableLibrary":
@@ -1288,6 +1306,10 @@ class FabricClientCore(FabricClient):
             return self.get_dataflow(workspace_id, item_dict["id"])
         if item_dict["type"] == "DataPipeline":
             return self.get_data_pipeline(workspace_id, item_dict["id"])
+        if item_dict["type"] == "DigitalTwinBuilder":
+            return self.get_digital_twin_builder(workspace_id, item_dict["id"])
+        if item_dict["type"] == "DigitalTwinBuilderFlow":
+            return self.get_digital_twin_builder_flow(workspace_id, item_dict["id"])
         if item_dict["type"] == "Eventstream":
             return self.get_eventstream(workspace_id, item_dict["id"])
         if item_dict["type"] == "Eventhouse":
@@ -1300,6 +1322,8 @@ class FabricClientCore(FabricClient):
             return self.get_kql_queryset(workspace_id, item_dict["id"])
         if item_dict["type"] == "Lakehouse":
             return self.get_lakehouse(workspace_id, item_dict["id"])
+        if item_dict["type"] == "MirroredAzureDatabricksCatalog":
+            return self.get_mirrored_azure_databricks_catalog(workspace_id, item_dict["id"])
         if item_dict["type"] == "MirroredDatabase":
             return self.get_mirrored_database(workspace_id, item_dict["id"])
         if item_dict["type"] == "MLExperiment":
@@ -1316,6 +1340,8 @@ class FabricClientCore(FabricClient):
             return self.get_spark_job_definition(workspace_id, item_dict["id"])
         if item_dict["type"] == "Warehouse":
             return self.get_warehouse(workspace_id, item_dict["id"])
+        if item_dict["type"] == "WarehouseSnapshot":
+            return self.get_warehouse_snapshot(workspace_id, item_dict["id"])
         if item_dict["type"] == "Environment":
             return self.get_environment(workspace_id, item_dict["id"])
 
@@ -1336,7 +1362,7 @@ class FabricClientCore(FabricClient):
 
     # Create
 
-    def create_item(self, workspace_id, display_name, type, definition = None, description = None, wait_for_completion = True, **kwargs):
+    def create_item(self, workspace_id, display_name, type, definition = None, description = None, wait_for_completion = True, creation_payload = None, folder_id = None, **kwargs):
         """Create an item in a workspace
         Args:
             workspace_id (str): The ID of the workspace
@@ -1359,11 +1385,20 @@ class FabricClientCore(FabricClient):
             body['definition'] = definition
         if description:
             body['description'] = description
+
+        if creation_payload:
+            body["creationPayload"] = creation_payload
+
+        if folder_id:
+            body['folderId'] = folder_id
 
-        if type in ["
+        if type in ["ApacheAirflowJobs",
+                    "copyJobs",
                     "VariableLibraries",
                     "dataflows",
                     "dataPipelines",
+                    "digitaltwinbuilders",
+                    "DigitalTwinBuilderFlows",
                     "environments",
                     "eventhouses",
                     "eventstreams",
@@ -1372,6 +1407,7 @@ class FabricClientCore(FabricClient):
                     "kqlDashboards",
                     "kqlQuerysets",
                     "lakehouses",
+                    "mirroredAzureDatabricksCatalogs",
                     "mirroredDatabases",
                     "mlExperiments",
                     "mlModels",
@@ -1382,16 +1418,14 @@ class FabricClientCore(FabricClient):
                     "semanticModels",
                     "sparkJobDefinitions",
                     "SQLDatabases",
-                    "warehouses"
+                    "warehouses",
+                    "warehousesnapshots"]:
 
-            if type == "lakehouses":
-                if "creation_payload" in kwargs:
-                    body["creationPayload"] = kwargs["creation_payload"]
 
             if type == "kqlDatabases":
-                if
+                if creation_payload is None:
                     raise Exception("creation_payload is required for KQLDatabase")
-                body["creationPayload"] =
+                body["creationPayload"] = creation_payload
 
             url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/{type}"
             body.pop('type')
@@ -1407,9 +1441,12 @@ class FabricClientCore(FabricClient):
         item = None
         i = 0
 
-        type_mapping = {"
+        type_mapping = {"ApacheAirflowJobs": "ApacheAirflowJob",
+                        "copyJobs": "CopyJob",
                         "VariableLibraries": "VariableLibrary",
                         "dataflows": "Dataflow",
+                        "digitaltwinbuilders": "DigitalTwinBuilder",
+                        "DigitalTwinBuilderFlows": "DigitalTwinBuilderFlow",
                         "dataPipelines": "DataPipeline",
                         "environments": "Environment",
                         "eventhouses": "Eventhouse",
@@ -1419,6 +1456,7 @@ class FabricClientCore(FabricClient):
                         "kqlDatabases": "KQLDatabase",
                         "kqlQuerysets": "KQLQueryset",
                         "lakehouses": "Lakehouse",
+                        "mirroredAzureDatabricksCatalogs": "MirroredAzureDatabricksCatalog",
                         "mirroredDatabases": "MirroredDatabase",
                         "mlExperiments": "MLExperiment",
                         "mlModels": "MLModel",
@@ -1429,7 +1467,8 @@ class FabricClientCore(FabricClient):
                         "semanticModels": "SemanticModel",
                         "sparkJobDefinitions": "SparkJobDefinition",
                         "SQLDatabases": "SQLDatabase",
-                        "warehouses": "Warehouse"
+                        "warehouses": "Warehouse",
+                        "warehousesnapshots": "WarehouseSnapshot"
                         }
 
         if type in type_mapping.keys():
@@ -1587,7 +1626,7 @@ class FabricClientCore(FabricClient):
                                     return_format="json+operation_result")
 
 
-    def update_item(self, workspace_id, item_id, display_name = None, description = None, type = None, return_item=False):
+    def update_item(self, workspace_id, item_id, display_name = None, description = None, type = None, return_item=False, **kwargs):
         """Update the item
         Args:
             workspace_id (str): The ID of the workspace
@@ -1607,6 +1646,8 @@ class FabricClientCore(FabricClient):
             payload['displayName'] = display_name
         if description:
             payload['description'] = description
+        if "properties" in kwargs:
+            payload['properties'] = kwargs["properties"]
 
         resp_dict = self.calling_routine(url, operation="PATCH", body=payload,
                                          response_codes=[200, 429], error_message="Error updating item",
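Taken together, these hunks promote `creation_payload` and `folder_id` to first-class `create_item` parameters (previously `creation_payload` was only honored for lakehouses via `**kwargs`) and let `update_item` forward a `properties` dict into the PATCH payload. A hedged sketch of the new call shapes, with placeholder IDs and an assumed schema-enabled-lakehouse payload:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()

# creation_payload and folder_id are now explicit create_item parameters.
lakehouse = fc.create_item(
    workspace_id="<workspace-id>",
    display_name="lh_with_schemas",
    type="lakehouses",
    creation_payload={"enableSchemas": True},  # assumed schema-enabled payload
    folder_id="<folder-id>",                   # optional target folder
)

# update_item now forwards a "properties" kwarg into the PATCH payload.
fc.update_item("<workspace-id>", "<item-id>", description="updated",
               properties={"<property>": "<value>"})  # placeholder properties dict
```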
@@ -2403,6 +2444,115 @@ class FabricClientCore(FabricClient):
         """List mirrored warehouses in a workspace"""
         return self.list_items(workspace_id, type="mirroredWarehouses")
 
+
+    #airflowjob
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/ApacheAirflowJobs
+    def create_apache_airflow_job(self, workspace_id, display_name, definition = None, description = None, folder_id = None):
+        """Create an Apache Airflow job in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            display_name (str): The display name of the Apache Airflow job
+            definition (dict): The definition of the Apache Airflow job
+            description (str): The description of the Apache Airflow job
+            folder_id (str): The ID of the folder to create the job in
+        Returns:
+            ApacheAirflowJob: The created Apache Airflow job object
+        """
+        return self.create_item(workspace_id=workspace_id,
+                                display_name = display_name,
+                                type = "ApacheAirflowJobs",
+                                definition = definition,
+                                description = description, folder_id=folder_id)
+
+    # DELETE https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/ApacheAirflowJobs/{ApacheAirflowJobId}
+    def delete_apache_airflow_job(self, workspace_id, apache_airflow_job_id):
+        """Delete an Apache Airflow job from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            apache_airflow_job_id (str): The ID of the Apache Airflow job
+        Returns:
+            int: The status code of the response
+        """
+        return self.delete_item(workspace_id, item_id=apache_airflow_job_id, type="ApacheAirflowJobs")
+
+    # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/ApacheAirflowJobs/{ApacheAirflowJobId}
+    def get_apache_airflow_job(self, workspace_id, apache_airflow_job_id = None, apache_airflow_job_name = None):
+        """Get an Apache Airflow job from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            apache_airflow_job_id (str): The ID of the Apache Airflow job
+            apache_airflow_job_name (str): The name of the Apache Airflow job
+        Returns:
+            ApacheAirflowJob: The Apache Airflow job object
+        """
+        from msfabricpysdkcore.otheritems import ApacheAirflowJob
+
+        if apache_airflow_job_id is None and apache_airflow_job_name is not None:
+            apache_airflow_jobs = self.list_apache_airflow_jobs(workspace_id)
+            aajs = [aaj for aaj in apache_airflow_jobs if aaj.display_name == apache_airflow_job_name]
+            if len(aajs) == 0:
+                raise Exception(f"Apache Airflow job with name {apache_airflow_job_name} not found")
+            apache_airflow_job_id = aajs[0].id
+        elif apache_airflow_job_id is None:
+            raise Exception("apache_airflow_job_id or the apache_airflow_job_name is required")
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/ApacheAirflowJobs/{apache_airflow_job_id}"
+        item_dict = self.calling_routine(url, operation="GET", response_codes=[200, 429],
+                                         error_message="Error getting Apache Airflow job", return_format="json")
+        aaj = ApacheAirflowJob.from_dict(item_dict, core_client=self)
+        aaj.get_definition()
+        return aaj
+
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/ApacheAirflowJobs/{ApacheAirflowJobId}/getDefinition
+    def get_apache_airflow_job_definition(self, workspace_id, apache_airflow_job_id, format = None):
+        """Get the definition of an Apache Airflow job
+        Args:
+            workspace_id (str): The ID of the workspace
+            apache_airflow_job_id (str): The ID of the Apache Airflow job
+            format (str): The format of the definition
+        Returns:
+            dict: The Apache Airflow job definition
+        """
+        return self.get_item_definition(workspace_id, apache_airflow_job_id, type="ApacheAirflowJobs", format=format)
+
+    # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/ApacheAirflowJobs
+    def list_apache_airflow_jobs(self, workspace_id, with_properties = False):
+        """List Apache Airflow jobs in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            with_properties (bool): Whether to get the item object with properties
+        Returns:
+            list: The list of Apache Airflow jobs
+        """
+        return self.list_items(workspace_id, type="ApacheAirflowJobs", with_properties=with_properties)
+
+    # PATCH https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/ApacheAirflowJobs/{ApacheAirflowJobId}
+    def update_apache_airflow_job(self, workspace_id, apache_airflow_job_id,
+                                  display_name = None, description = None, return_item=False):
+        """Update an Apache Airflow job in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            apache_airflow_job_id (str): The ID of the Apache Airflow job
+            display_name (str): The display name of the Apache Airflow job
+            description (str): The description of the Apache Airflow job
+        Returns:
+            dict: The updated Apache Airflow job or ApacheAirflowJob object if return_item is True
+        """
+        return self.update_item(workspace_id, item_id=apache_airflow_job_id, display_name=display_name, description=description, type="ApacheAirflowJobs",
+                                return_item=return_item)
+
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/ApacheAirflowJobs/{ApacheAirflowJobId}/updateDefinition
+    def update_apache_airflow_job_definition(self, workspace_id, apache_airflow_job_id, definition, update_metadata = None):
+        """Update the definition of an Apache Airflow job
+        Args:
+            workspace_id (str): The ID of the workspace
+            apache_airflow_job_id (str): The ID of the Apache Airflow job
+            definition (dict): The definition of the Apache Airflow job
+            update_metadata (bool): Whether to update the metadata
+        Returns:
+            dict: The updated Apache Airflow job definition
+        """
+        return self.update_item_definition(workspace_id, apache_airflow_job_id, type="ApacheAirflowJobs", definition=definition, update_metadata=update_metadata)
+
     # copyJobs
     # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/copyJobs
     def create_copy_job(self, workspace_id, display_name, definition = None, description = None):
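The Apache Airflow job block above is a thin CRUD wrapper over the generic item helpers. A usage sketch with placeholder IDs:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()
ws = "<workspace-id>"

# Create, look up by name, rename, list, and delete an Apache Airflow job.
aaj = fc.create_apache_airflow_job(ws, display_name="daily_ingest")
same = fc.get_apache_airflow_job(ws, apache_airflow_job_name="daily_ingest")
fc.update_apache_airflow_job(ws, same.id, display_name="daily_ingest_v2")
print([j.display_name for j in fc.list_apache_airflow_jobs(ws)])
fc.delete_apache_airflow_job(ws, same.id)
```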
@@ -2719,6 +2869,87 @@ class FabricClientCore(FabricClient):
         """
         return self.update_item_definition(workspace_id, dataflow_id, type="dataflows", definition=definition, update_metadata=update_metadata)
 
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/dataflows/{dataflowId}/jobs/instances?jobType={jobType}
+    def run_on_demand_apply_changes(self, workspace_id, dataflow_id, job_type = "ApplyChanges", wait_for_completion = True):
+        """Run an on-demand apply changes job for a dataflow
+        Args:
+            workspace_id (str): The ID of the workspace
+            dataflow_id (str): The ID of the dataflow
+            job_type (str): The type of the job, default is "ApplyChanges"
+            wait_for_completion (bool): Whether to wait for the operation to complete
+        Returns:
+            requests.Response: The response object
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/dataflows/{dataflow_id}/jobs/instances?jobType={job_type}"
+
+        response = self.calling_routine(url, operation="POST", response_codes=[202, 429], error_message="Error running on-demand apply changes",
+                                        return_format="response", wait_for_completion=wait_for_completion)
+
+        return response
+
+    def run_on_demand_execute(self, workspace_id, dataflow_id, job_type = "Execute", wait_for_completion = True):
+        """Run an on-demand execute job for a dataflow
+        Args:
+            workspace_id (str): The ID of the workspace
+            dataflow_id (str): The ID of the dataflow
+            job_type (str): The type of the job, default is "Execute"
+            wait_for_completion (bool): Whether to wait for the operation to complete
+        Returns:
+            requests.Response: The response object
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/dataflows/{dataflow_id}/jobs/instances?jobType={job_type}"
+
+        response = self.calling_routine(url, operation="POST", response_codes=[202, 429], error_message="Error running on-demand execute",
+                                        return_format="response", wait_for_completion=wait_for_completion)
+
+        return response
+
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/dataflows/{dataflowId}/jobs/ApplyChanges/schedules
+    def schedule_apply_changes(self, workspace_id, dataflow_id, configuration, enabled):
+        """Schedule an apply changes job for a dataflow
+        Args:
+            workspace_id (str): The ID of the workspace
+            dataflow_id (str): The ID of the dataflow
+            configuration (dict): The configuration of the schedule
+            enabled (bool): Whether the schedule is enabled
+        Returns:
+            dict: The schedule object
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/dataflows/{dataflow_id}/jobs/ApplyChanges/schedules"
+
+        body = {
+            "configuration": configuration,
+            "enabled": enabled
+        }
+
+        response_dict = self.calling_routine(url, operation="POST", body=body, response_codes=[201, 429], error_message="Error scheduling apply changes",
+                                             return_format="json")
+
+        return response_dict
+
+    def schedule_execute(self, workspace_id, dataflow_id, configuration, enabled):
+        """Schedule an execute job for a dataflow
+        Args:
+            workspace_id (str): The ID of the workspace
+            dataflow_id (str): The ID of the dataflow
+            configuration (dict): The configuration of the schedule
+            enabled (bool): Whether the schedule is enabled
+        Returns:
+            dict: The schedule object
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/dataflows/{dataflow_id}/jobs/Execute/schedules"
+
+        body = {
+            "configuration": configuration,
+            "enabled": enabled
+        }
+
+        response_dict = self.calling_routine(url, operation="POST", body=body, response_codes=[201, 429], error_message="Error scheduling execute",
+                                             return_format="json")
+
+        return response_dict
+
+
 
     # dataPipelines
 
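The on-demand runs return the raw `requests.Response` (202 on acceptance), while the schedule calls POST a `{configuration, enabled}` body and return the parsed JSON. A sketch; the exact `configuration` schema is an assumption based on the common Fabric job-schedule shape, so verify it against the Fabric REST documentation:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()
ws, df = "<workspace-id>", "<dataflow-id>"

# Kick off an on-demand Execute run and block until it finishes.
resp = fc.run_on_demand_execute(ws, df, wait_for_completion=True)
print(resp.status_code)

# Register a daily schedule for the Execute job type (assumed schema).
fc.schedule_execute(ws, df, configuration={
    "type": "Daily",
    "times": ["06:00"],
    "startDateTime": "2025-01-01T00:00:00",
    "endDateTime": "2026-01-01T00:00:00",
    "localTimeZoneId": "UTC",
}, enabled=True)
```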
@@ -2816,6 +3047,210 @@ class FabricClientCore(FabricClient):
         """
         return self.update_item_definition(workspace_id, data_pipeline_id, type="dataPipelines", definition=definition, update_metadata=update_metadata)
 
+    # digitaltwinbuilder
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/digitaltwinbuilders
+    def create_digital_twin_builder(self, workspace_id, display_name, definition = None, description = None, folder_id = None):
+        """Create a digital twin builder in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            display_name (str): The display name of the digital twin builder
+            definition (dict): The definition of the digital twin builder
+            description (str): The description of the digital twin builder
+            folder_id (str): The ID of the folder to create the digital twin builder in
+        Returns:
+            DigitalTwinBuilder: The created digital twin builder object
+        """
+        return self.create_item(workspace_id=workspace_id,
+                                display_name = display_name,
+                                type = "digitaltwinbuilders",
+                                definition = definition,
+                                description = description, folder_id=folder_id)
+
+    def delete_digital_twin_builder(self, workspace_id, digital_twin_builder_id):
+        """Delete a digital twin builder from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_id (str): The ID of the digital twin builder
+        Returns:
+            int: The status code of the response
+        """
+        return self.delete_item(workspace_id, item_id=digital_twin_builder_id, type="digitaltwinbuilders")
+
+    def get_digital_twin_builder(self, workspace_id, digital_twin_builder_id = None, digital_twin_builder_name = None):
+        """Get a digital twin builder from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_id (str): The ID of the digital twin builder
+            digital_twin_builder_name (str): The name of the digital twin builder
+        Returns:
+            DigitalTwinBuilder: The digital twin builder object
+        """
+        from msfabricpysdkcore.otheritems import DigitalTwinBuilder
+
+        if digital_twin_builder_id is None and digital_twin_builder_name is not None:
+            digital_twin_builders = self.list_digital_twin_builders(workspace_id)
+            dtbs = [dtb for dtb in digital_twin_builders if dtb.display_name == digital_twin_builder_name]
+            if len(dtbs) == 0:
+                raise Exception(f"Digital twin builder with name {digital_twin_builder_name} not found")
+            digital_twin_builder_id = dtbs[0].id
+        elif digital_twin_builder_id is None:
+            raise Exception("digital_twin_builder_id or the digital_twin_builder_name is required")
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/digitaltwinbuilders/{digital_twin_builder_id}"
+
+        item_dict = self.calling_routine(url, operation="GET", response_codes=[200, 429],
+                                         error_message="Error getting digital twin builder", return_format="json")
+
+        dtb = DigitalTwinBuilder.from_dict(item_dict, core_client=self)
+        dtb.get_definition()
+        return dtb
+
+    def get_digital_twin_builder_definition(self, workspace_id, digital_twin_builder_id, format = None):
+        """Get the definition of a digital twin builder
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_id (str): The ID of the digital twin builder
+            format (str): The format of the definition
+        Returns:
+            dict: The digital twin builder definition
+        """
+        return self.get_item_definition(workspace_id, digital_twin_builder_id, type="digitaltwinbuilders", format=format)
+
+    def list_digital_twin_builders(self, workspace_id, with_properties = False):
+        """List digital twin builders in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            with_properties (bool): Whether to get the item object with properties
+        Returns:
+            list: The list of digital twin builders
+        """
+        return self.list_items(workspace_id, type="digitaltwinbuilders", with_properties=with_properties)
+
+    def update_digital_twin_builder(self, workspace_id, digital_twin_builder_id, display_name = None, description = None, return_item=False):
+        """Update a digital twin builder in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_id (str): The ID of the digital twin builder
+            display_name (str): The display name of the digital twin builder
+            description (str): The description of the digital twin builder
+        Returns:
+            dict: The updated digital twin builder or DigitalTwinBuilder object if return_item is True
+        """
+        return self.update_item(workspace_id, item_id=digital_twin_builder_id, display_name=display_name, description=description, type="digitaltwinbuilders",
+                                return_item=return_item)
+
+    def update_digital_twin_builder_definition(self, workspace_id, digital_twin_builder_id, definition, update_metadata = None):
+        """Update the definition of a digital twin builder
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_id (str): The ID of the digital twin builder
+            definition (dict): The definition of the digital twin builder
+            update_metadata (bool): Whether to update the metadata
+        Returns:
+            dict: The updated digital twin builder definition
+        """
+        return self.update_item_definition(workspace_id, digital_twin_builder_id, type="digitaltwinbuilders", definition=definition, update_metadata=update_metadata)
+
+    # digital twin builder flows
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/DigitalTwinBuilderFlows
+    def create_digital_twin_builder_flow(self, workspace_id, display_name, creation_payload, definition = None, description = None):
+        """Create a digital twin builder flow in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            display_name (str): The display name of the digital twin builder flow
+            creation_payload (dict): The creation payload for the digital twin builder flow
+            definition (dict): The definition of the digital twin builder flow
+            description (str): The description of the digital twin builder flow
+        Returns:
+            DigitalTwinBuilderFlow: The created digital twin builder flow object
+        """
+        return self.create_item(workspace_id=workspace_id,
+                                display_name = display_name,
+                                type = "DigitalTwinBuilderFlows",
+                                definition = definition,
+                                description = description, creation_payload=creation_payload)
+
+    def delete_digital_twin_builder_flow(self, workspace_id, digital_twin_builder_flow_id):
+        """Delete a digital twin builder flow from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_flow_id (str): The ID of the digital twin builder flow
+        Returns:
+            int: The status code of the response
+        """
+        return self.delete_item(workspace_id, item_id=digital_twin_builder_flow_id, type="DigitalTwinBuilderFlows")
+
+    def get_digital_twin_builder_flow(self, workspace_id, digital_twin_builder_flow_id = None, digital_twin_builder_flow_name = None):
+        """Get a digital twin builder flow from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_flow_id (str): The ID of the digital twin builder flow
+            digital_twin_builder_flow_name (str): The name of the digital twin builder flow
+        Returns:
+            DigitalTwinBuilderFlow: The digital twin builder flow object
+        """
+        from msfabricpysdkcore.otheritems import DigitalTwinBuilderFlow
+
+        if digital_twin_builder_flow_id is None and digital_twin_builder_flow_name is not None:
+            digital_twin_builder_flows = self.list_digital_twin_builder_flows(workspace_id)
+            dtbfs = [dtbf for dtbf in digital_twin_builder_flows if dtbf.display_name == digital_twin_builder_flow_name]
+            if len(dtbfs) == 0:
+                raise Exception(f"Digital twin builder flow with name {digital_twin_builder_flow_name} not found")
+            digital_twin_builder_flow_id = dtbfs[0].id
+        elif digital_twin_builder_flow_id is None:
+            raise Exception("digital_twin_builder_flow_id or the digital_twin_builder_flow_name is required")
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/DigitalTwinBuilderFlows/{digital_twin_builder_flow_id}"
+        item_dict = self.calling_routine(url, operation="GET", response_codes=[200, 429],
+                                         error_message="Error getting digital twin builder flow", return_format="json")
+        dtbf = DigitalTwinBuilderFlow.from_dict(item_dict, core_client=self)
+        dtbf.get_definition()
+        return dtbf
+
+    def get_digital_twin_builder_flow_definition(self, workspace_id, digital_twin_builder_flow_id, format = None):
+        """Get the definition of a digital twin builder flow
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_flow_id (str): The ID of the digital twin builder flow
+            format (str): The format of the definition
+        Returns:
+            dict: The digital twin builder flow definition
+        """
+        return self.get_item_definition(workspace_id, digital_twin_builder_flow_id, type="DigitalTwinBuilderFlows", format=format)
+
+    def list_digital_twin_builder_flows(self, workspace_id, with_properties = False):
+        """List digital twin builder flows in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            with_properties (bool): Whether to get the item object with properties
+        Returns:
+            list: The list of digital twin builder flows
+        """
+        return self.list_items(workspace_id, type="DigitalTwinBuilderFlows", with_properties=with_properties)
+
+    def update_digital_twin_builder_flow(self, workspace_id, digital_twin_builder_flow_id, display_name = None, description = None, return_item=False):
+        """Update a digital twin builder flow in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_flow_id (str): The ID of the digital twin builder flow
+            display_name (str): The display name of the digital twin builder flow
+            description (str): The description of the digital twin builder flow
+        Returns:
+            dict: The updated digital twin builder flow or DigitalTwinBuilderFlow object if return_item is True
+        """
+        return self.update_item(workspace_id, item_id=digital_twin_builder_flow_id, display_name=display_name, description=description, type="DigitalTwinBuilderFlows",
+                                return_item=return_item)
+
+    def update_digital_twin_builder_flow_definition(self, workspace_id, digital_twin_builder_flow_id, definition, update_metadata = None):
+        """Update the definition of a digital twin builder flow
+        Args:
+            workspace_id (str): The ID of the workspace
+            digital_twin_builder_flow_id (str): The ID of the digital twin builder flow
+            definition (dict): The definition of the digital twin builder flow
+            update_metadata (bool): Whether to update the metadata
+        Returns:
+            dict: The updated digital twin builder flow definition
+        """
+        return self.update_item_definition(workspace_id, digital_twin_builder_flow_id, type="DigitalTwinBuilderFlows", definition=definition, update_metadata=update_metadata)
 
     # environments
 
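Digital twin builder flows differ from builders in that `creation_payload` is a required positional argument of the create call. A sketch; the payload key referencing the parent builder is an assumption, not confirmed by this diff, so check the Fabric REST documentation for the exact schema:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()
ws = "<workspace-id>"

dtb = fc.create_digital_twin_builder(ws, display_name="factory_twin")

# creation_payload is required for flows; the key below is hypothetical.
flow = fc.create_digital_twin_builder_flow(
    ws,
    display_name="factory_twin_flow",
    creation_payload={"digitalTwinBuilderItemReference": {
        "workspaceId": ws, "itemId": dtb.id}},
)
```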
@@ -3053,11 +3488,13 @@ class FabricClientCore(FabricClient):
 
     # eventhouses
 
-    def create_eventhouse(self, workspace_id, display_name, description = None):
+    def create_eventhouse(self, workspace_id, display_name, definition = None, creation_payload = None, description = None):
         """Create an eventhouse in a workspace
         Args:
             workspace_id (str): The ID of the workspace
             display_name (str): The display name of the eventhouse
+            definition (str): The definition of the eventhouse
+            creation_payload (dict): The creation payload for the eventhouse
             description (str): The description of the eventhouse
         Returns:
             Eventhouse: The created eventhouse
@@ -3065,7 +3502,8 @@ class FabricClientCore(FabricClient):
         return self.create_item(workspace_id=workspace_id,
                                 display_name = display_name,
                                 type = "eventhouses",
-                                definition =
+                                definition = definition,
+                                creation_payload = creation_payload,
                                 description = description)
 
     def delete_eventhouse(self, workspace_id, eventhouse_id):
@@ -3156,7 +3594,7 @@ class FabricClientCore(FabricClient):
 
     # eventstreams
 
-    def create_eventstream(self, workspace_id, display_name, description = None):
+    def create_eventstream(self, workspace_id, display_name, description = None, definition = None, creation_payload = None):
         """Create an eventstream in a workspace
         Args:
             workspace_id (str): The ID of the workspace
@@ -3168,7 +3606,8 @@ class FabricClientCore(FabricClient):
         return self.create_item(workspace_id = workspace_id,
                                 display_name = display_name,
                                 type = "eventstreams",
-                                definition =
+                                definition = definition,
+                                creation_payload= creation_payload,
                                 description = description)
 
 
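Both creators keep `description` support and simply pass the new optional arguments through to `create_item`, so existing calls keep working. A sketch with placeholder IDs:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()
ws = "<workspace-id>"

# The new definition / creation_payload arguments default to None,
# so the pre-0.2.7 call shape is unchanged.
eh = fc.create_eventhouse(ws, display_name="telemetry_eh")
es = fc.create_eventstream(ws, display_name="telemetry_es")
```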
@@ -3850,6 +4289,195 @@ class FabricClientCore(FabricClient):
                                            type="kqlQuerysets", definition=definition, update_metadata=update_metadata)
 
 
+    # mirrored azure databricks catalog
+    def create_mirrored_azure_databricks_catalog(self, workspace_id, display_name, description = None, definition = None, creation_payload = None, folder_id = None):
+        """Create a mirrored azure databricks catalog in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            display_name (str): The display name of the mirrored azure databricks catalog
+            description (str): The description of the mirrored azure databricks catalog
+            creation_payload (dict): The creation payload
+            folder_id (str): The folder ID to create the item in
+        Returns:
+            dict: The created mirrored azure databricks catalog
+        """
+        if creation_payload is None and definition is None:
+            raise Exception("Either creation_payload or definition must be provided")
+        if creation_payload is not None and definition is not None:
+            raise Exception("Only one of creation_payload or definition can be provided")
+        return self.create_item(workspace_id = workspace_id,
+                                display_name = display_name,
+                                type = "mirroredAzureDatabricksCatalogs",
+                                description = description,
+                                definition = definition,
+                                creation_payload = creation_payload,
+                                folder_id = folder_id)
+
+    def delete_mirrored_azure_databricks_catalog(self, workspace_id, mirrored_azure_databricks_catalog_id):
+        """Delete a mirrored azure databricks catalog from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            mirrored_azure_databricks_catalog_id (str): The ID of the mirrored azure databricks catalog
+        Returns:
+            int: The status code of the response
+        """
+        return self.delete_item(workspace_id, mirrored_azure_databricks_catalog_id, type="mirroredAzureDatabricksCatalogs")
+
+    def get_mirrored_azure_databricks_catalog(self, workspace_id, mirrored_azure_databricks_catalog_id = None,
+                                              mirrored_azure_databricks_catalog_name = None):
+        """Get a mirrored azure databricks catalog from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            mirrored_azure_databricks_catalog_id (str): The ID of the mirrored azure databricks catalog
+            mirrored_azure_databricks_catalog_name (str): The name of the mirrored azure databricks catalog
+        Returns:
+            MirroredAzureDatabricksCatalog: The mirrored azure databricks catalog object
+        """
+        from msfabricpysdkcore.otheritems import MirroredAzureDatabricksCatalog
+        if mirrored_azure_databricks_catalog_id is None and mirrored_azure_databricks_catalog_name is not None:
+            mirrored_azure_databricks_catalogs = self.list_mirrored_azure_databricks_catalogs(workspace_id)
+            mirrored_azure_databricks_catalogs = [madc for madc in mirrored_azure_databricks_catalogs if madc.display_name == mirrored_azure_databricks_catalog_name]
+            if len(mirrored_azure_databricks_catalogs) == 0:
+                raise Exception(f"Mirrored azure databricks catalog with name {mirrored_azure_databricks_catalog_name} not found")
+            mirrored_azure_databricks_catalog_id = mirrored_azure_databricks_catalogs[0].id
+        if mirrored_azure_databricks_catalog_id is None:
+            raise Exception("mirrored_azure_databricks_catalog_id or the mirrored_azure_databricks_catalog_name is required")
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/mirroredAzureDatabricksCatalogs/{mirrored_azure_databricks_catalog_id}"
+        item_dict = self.calling_routine(url, operation="GET", response_codes=[200, 429],
+                                         error_message="Error getting mirrored azure databricks catalog", return_format="json")
+        madc = MirroredAzureDatabricksCatalog.from_dict(item_dict, core_client=self)
+        madc.get_definition()
+        return madc
+
+    def get_mirrored_azure_databricks_catalog_definition(self, workspace_id, mirrored_azure_databricks_catalog_id, format=None):
+        """Get the definition of a mirrored azure databricks catalog
+        Args:
+            workspace_id (str): The ID of the workspace
+            mirrored_azure_databricks_catalog_id (str): The ID of the mirrored azure databricks catalog
+            format (str): The format of the definition
+        Returns:
+            dict: The definition of the mirrored azure databricks catalog
+        """
+        return self.get_item_definition(workspace_id, mirrored_azure_databricks_catalog_id, type="mirroredAzureDatabricksCatalogs", format=format)
+
+    def list_mirrored_azure_databricks_catalogs(self, workspace_id, with_properties = False):
+        """List mirrored azure databricks catalogs in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            with_properties (bool): Whether to get the item object with properties
+        Returns:
+            list: The list of mirrored azure databricks catalogs
+        """
+        return self.list_items(workspace_id=workspace_id, type="mirroredAzureDatabricksCatalogs", with_properties=with_properties)
+
+    def update_mirrored_azure_databricks_catalog(self, workspace_id, mirrored_azure_databricks_catalog_id, display_name = None, description = None, return_item=False):
+        """Update a mirrored azure databricks catalog in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            mirrored_azure_databricks_catalog_id (str): The ID of the mirrored azure databricks catalog
+            display_name (str): The display name of the mirrored azure databricks catalog
+            description (str): The description of the mirrored azure databricks catalog
+        Returns:
+            dict: The updated mirrored azure databricks catalog
+        """
+        return self.update_item(workspace_id, mirrored_azure_databricks_catalog_id, display_name = display_name,
+                                description = description, type= "mirroredAzureDatabricksCatalogs", return_item=return_item)
+
+    def update_mirrored_azure_databricks_catalog_definition(self, workspace_id, mirrored_azure_databricks_catalog_id, definition, update_metadata = None):
+        """Update the definition of a mirrored azure databricks catalog
+        Args:
+            workspace_id (str): The ID of the workspace
+            mirrored_azure_databricks_catalog_id (str): The ID of the mirrored azure databricks catalog
+            definition (dict): The definition of the mirrored azure databricks catalog
+            update_metadata (bool): Whether to update the metadata
+        Returns:
+            dict: The updated definition of the mirrored azure databricks catalog
+        """
+        return self.update_item_definition(workspace_id, mirrored_azure_databricks_catalog_id,
+                                           type="mirroredAzureDatabricksCatalogs", definition=definition, update_metadata=update_metadata)
+
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/mirroredAzureDatabricksCatalogs/{mirroredAzureDatabricksCatalogId}/refreshCatalogMetadata
+    def refresh_mirrored_azure_databricks_catalog_metadata(self, workspace_id, mirrored_azure_databricks_catalog_id, wait_for_completion = False):
+        """Refresh the metadata of a mirrored azure databricks catalog
+        Args:
+            workspace_id (str): The ID of the workspace
+            mirrored_azure_databricks_catalog_id (str): The ID of the mirrored azure databricks catalog
+        Returns:
+            dict: The operation result or response value
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/mirroredAzureDatabricksCatalogs/{mirrored_azure_databricks_catalog_id}/refreshCatalogMetadata"
+
+        response = self.calling_routine(url, operation="POST", response_codes=[200, 202, 429],
+                                        error_message="Error refreshing mirrored azure databricks catalog metadata",
+                                        return_format="response", wait_for_completion=wait_for_completion)
+
+        return response.status_code
+
+    # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/azuredatabricks/catalogs?databricksWorkspaceConnectionId={databricksWorkspaceConnectionId}
+    def discover_mirrored_azure_databricks_catalogs(self, workspace_id, databricks_workspace_connection_id, max_results = None):
+        """List mirrored azure databricks catalogs by connection
+        Args:
+            workspace_id (str): The ID of the workspace
+            databricks_workspace_connection_id (str): The ID of the databricks workspace connection
+            max_results (int): The maximum number of results to return
+        Returns:
+            list: The list of mirrored azure databricks catalogs
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/azuredatabricks/catalogs?databricksWorkspaceConnectionId={databricks_workspace_connection_id}"
+
+        if max_results is not None:
+            url += f"&maxResults={max_results}"
+
+        items = self.calling_routine(url, operation="GET", response_codes=[200, 429],
+                                     error_message="Error listing mirrored azure databricks catalogs by connection", return_format="value_json", paging=True)
+
+        return items
+
+    # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/azuredatabricks/catalogs/{catalogName}/schemas?databricksWorkspaceConnectionId={databricksWorkspaceConnectionId}
+    def discover_mirrored_azure_databricks_catalog_schemas(self, workspace_id, catalog_name, databricks_workspace_connection_id, max_results = None):
+        """List mirrored azure databricks catalog schemas by connection
+        Args:
+            workspace_id (str): The ID of the workspace
+            catalog_name (str): The name of the catalog
+            databricks_workspace_connection_id (str): The ID of the databricks workspace connection
+            max_results (int): The maximum number of results to return
+        Returns:
+            list: The list of mirrored azure databricks catalog schemas
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/azuredatabricks/catalogs/{catalog_name}/schemas?databricksWorkspaceConnectionId={databricks_workspace_connection_id}"
+
+        if max_results is not None:
+            url += f"&maxResults={max_results}"
+
+        items = self.calling_routine(url, operation="GET", response_codes=[200, 429],
+                                     error_message="Error listing mirrored azure databricks catalog schemas by connection",
+                                     return_format="value_json", paging=True)
+
+        return items
+
+    # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/azuredatabricks/catalogs/{catalogName}/schemas/{schemaName}/tables?databricksWorkspaceConnectionId={databricksWorkspaceConnectionId}
+    def discover_mirrored_azure_databricks_catalog_tables(self, workspace_id, catalog_name, schema_name, databricks_workspace_connection_id, max_results = None):
+        """List mirrored azure databricks catalog tables by connection
+        Args:
+            workspace_id (str): The ID of the workspace
+            catalog_name (str): The name of the catalog
+            schema_name (str): The name of the schema
+            databricks_workspace_connection_id (str): The ID of the databricks workspace connection
+            max_results (int): The maximum number of results to return
+        Returns:
+            list: The list of mirrored azure databricks catalog tables
+        """
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/azuredatabricks/catalogs/{catalog_name}/schemas/{schema_name}/tables?databricksWorkspaceConnectionId={databricks_workspace_connection_id}"
+
+        if max_results is not None:
+            url += f"&maxResults={max_results}"
+
+        items = self.calling_routine(url, operation="GET", response_codes=[200, 429],
+                                     error_message="Error listing mirrored azure databricks catalog tables by connection",
+                                     return_format="value_json", paging=True)
+
+        return items
+
     # lakehouses
 
     def run_on_demand_table_maintenance(self, workspace_id, lakehouse_id,
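The three `discover_*` helpers page through the catalog hierarchy behind a Databricks workspace connection, and the create call enforces exactly one of `definition` / `creation_payload`. A sketch; the creation payload keys are assumptions, so check the Fabric REST docs for the exact schema:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()
ws, conn = "<workspace-id>", "<databricks-workspace-connection-id>"

# Walk catalogs -> schemas -> tables exposed through the connection.
catalogs = fc.discover_mirrored_azure_databricks_catalogs(ws, conn, max_results=10)
schemas = fc.discover_mirrored_azure_databricks_catalog_schemas(ws, "<catalog>", conn)
tables = fc.discover_mirrored_azure_databricks_catalog_tables(ws, "<catalog>", "<schema>", conn)

# Exactly one of definition / creation_payload is allowed (keys are assumed).
madc = fc.create_mirrored_azure_databricks_catalog(
    ws, display_name="uc_mirror",
    creation_payload={"catalogName": "<catalog>",
                      "databricksWorkspaceConnectionId": conn},
)
fc.refresh_mirrored_azure_databricks_catalog_metadata(ws, madc.id)
```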
@@ -3877,7 +4505,7 @@ class FabricClientCore(FabricClient):
 
         return respone_operation_result
 
-    def create_lakehouse(self, workspace_id, display_name, description = None):
+    def create_lakehouse(self, workspace_id, display_name, description = None, creation_payload = None):
         """Create a lakehouse in a workspace
         Args:
             workspace_id (str): The ID of the workspace
@@ -3889,23 +4517,9 @@ class FabricClientCore(FabricClient):
         return self.create_item(workspace_id = workspace_id,
                                 display_name = display_name,
                                 type = "lakehouses",
-                                description = description
+                                description = description,
+                                creation_payload = creation_payload)
 
-    def create_lakehouse_with_schema(self, workspace_id, display_name, creation_payload, description = None):
-        """Create a lakehouse in a workspace
-        Args:
-            workspace_id (str): The ID of the workspace
-            creation_payload (dict): The creation payload
-            display_name (str): The display name of the lakehouse
-            description (str): The description of the lakehouse
-        Returns:
-            dict: The created lakehouse
-        """
-        return self.create_item(workspace_id = workspace_id,
-                                display_name = display_name,
-                                type = "lakehouses",
-                                creation_payload = creation_payload,
-                                description = description)
 
     def delete_lakehouse(self, workspace_id, lakehouse_id):
         """Delete a lakehouse from a workspace
@@ -5340,6 +5954,28 @@ class FabricClientCore(FabricClient):
         return self.update_item(workspace_id, sql_database_id, display_name = display_name, description = description,
                                 type="SQLDatabases", return_item=return_item)
 
+    # SQL endpoints
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/sqlEndpoints/{sqlEndpointId}/refreshMetadata?preview={preview}
+    def refresh_sql_endpoint_metadata(self, workspace_id, sql_endpoint_id, preview = True, timeout = None, wait_for_completion = False):
+        """Refresh the metadata of a SQL endpoint
+        Args:
+            workspace_id (str): The ID of the workspace
+            sql_endpoint_id (str): The ID of the SQL endpoint
+            preview (bool): Whether to preview the refresh
+            timeout (int): The timeout for the request
+        Returns:
+            response: The response of the refresh operation
+        """
+        body = {}
+        if timeout is not None:
+            body["timeout"] = timeout
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata?preview={preview}"
+
+        response = self.calling_routine(url, operation="POST", body=body, response_codes=[200, 202, 429],
+                                        error_message="Error refreshing SQL endpoint metadata", return_format="response",
+                                        wait_for_completion=wait_for_completion)
+
+        return response
 
     # warehouses
 
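The refresh call POSTs an optional `timeout` body and returns the raw response (202 while the long-running refresh is pending, unless `wait_for_completion=True`). A sketch:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()

resp = fc.refresh_sql_endpoint_metadata(
    "<workspace-id>", "<sql-endpoint-id>",
    timeout=60,                # optional body field; unit assumed to be seconds
    wait_for_completion=True,  # poll the long-running operation to completion
)
print(resp.status_code)  # 200, or 202 while the refresh is still running
```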
@@ -5412,3 +6048,77 @@ class FabricClientCore(FabricClient):
         """
         return self.update_item(workspace_id, warehouse_id, display_name = display_name, description = description,
                                 type="warehouses", return_item=return_item)
+
+    # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/warehousesnapshots
+    def create_warehouse_snapshot(self, workspace_id, display_name, creation_payload, description = None, folder_id = None):
+        """Create a snapshot of a warehouse
+        Args:
+            workspace_id (str): The ID of the workspace
+            display_name (str): The display name of the snapshot
+            creation_payload (dict): The payload for creating the snapshot
+            description (str): The description of the snapshot
+            folder_id (str): The ID of the folder to create the snapshot in
+        Returns:
+            dict: The created snapshot
+        """
+        return self.create_item(workspace_id = workspace_id, display_name = display_name, type = "warehousesnapshots",
+                                creation_payload = creation_payload, description = description, folder_id = folder_id)
+
+    def delete_warehouse_snapshot(self, workspace_id, warehouse_snapshot_id):
+        """Delete a warehouse snapshot from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            warehouse_snapshot_id (str): The ID of the warehouse snapshot
+        Returns:
+            int: The status code of the response
+        """
+        return self.delete_item(workspace_id, warehouse_snapshot_id, type="warehousesnapshots")
+
+    def get_warehouse_snapshot(self, workspace_id, warehouse_snapshot_id = None, warehouse_snapshot_name = None):
+        """Get a warehouse snapshot from a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            warehouse_snapshot_id (str): The ID of the warehouse snapshot
+            warehouse_snapshot_name (str): The name of the warehouse snapshot
+        Returns:
+            WarehouseSnapshot: The warehouse snapshot object
+        """
+        from msfabricpysdkcore.otheritems import WarehouseSnapshot
+        if warehouse_snapshot_id is None and warehouse_snapshot_name is not None:
+            warehouse_snapshots = self.list_warehouse_snapshots(workspace_id)
+            warehouse_snapshots = [ws for ws in warehouse_snapshots if ws.display_name == warehouse_snapshot_name]
+            if len(warehouse_snapshots) == 0:
+                raise Exception(f"Warehouse snapshot with name {warehouse_snapshot_name} not found")
+            warehouse_snapshot_id = warehouse_snapshots[0].id
+        if warehouse_snapshot_id is None:
+            raise Exception("warehouse_snapshot_id or the warehouse_snapshot_name is required")
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/warehousesnapshots/{warehouse_snapshot_id}"
+
+        item_dict = self.calling_routine(url, operation="GET", response_codes=[200, 429],
+                                         error_message="Error getting warehouse snapshot", return_format="json")
+
+        return WarehouseSnapshot.from_dict(item_dict, core_client=self)
+
+    def list_warehouse_snapshots(self, workspace_id, with_properties = False):
+        """List warehouse snapshots in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            with_properties (bool): Whether to get the item object with properties
+        Returns:
+            list: The list of warehouse snapshots
+        """
+        return self.list_items(workspace_id = workspace_id, type = "warehousesnapshots", with_properties = with_properties)
+
+    def update_warehouse_snapshot(self, workspace_id, warehouse_snapshot_id, display_name = None, description = None, return_item=False, properties = None):
+        """Update a warehouse snapshot in a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            warehouse_snapshot_id (str): The ID of the warehouse snapshot
+            display_name (str): The display name of the warehouse snapshot
+            description (str): The description of the warehouse snapshot
+        Returns:
+            dict: The updated warehouse snapshot
+        """
+        return self.update_item(workspace_id, warehouse_snapshot_id, display_name = display_name, description = description,
+                                type="warehousesnapshots", return_item=return_item, properties=properties)
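Warehouse snapshots route through the same generic helpers; `update_warehouse_snapshot` is the first caller of `update_item`'s new `properties` passthrough. A sketch; the `creation_payload` and `properties` keys below are assumptions, so verify them against the Fabric REST documentation:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()
ws = "<workspace-id>"

# The key naming the source warehouse is hypothetical.
snap = fc.create_warehouse_snapshot(
    ws, display_name="wh_snap_20250101",
    creation_payload={"parentWarehouseId": "<warehouse-id>"},
)

# properties flows through update_item's new passthrough (key is hypothetical).
fc.update_warehouse_snapshot(ws, snap.id,
                             properties={"snapshotDateTime": "2025-01-01T00:00:00Z"})
fc.delete_warehouse_snapshot(ws, snap.id)
```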