msfabricpysdkcore 0.2.6__py3-none-any.whl → 0.2.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- msfabricpysdkcore/coreapi.py +745 -21
- msfabricpysdkcore/otheritems.py +61 -1
- msfabricpysdkcore/tests/test_admin_apis.py +2 -2
- msfabricpysdkcore/tests/test_apache_airflow_job.py +60 -0
- msfabricpysdkcore/tests/test_digital_twin_builder.py +60 -0
- msfabricpysdkcore/tests/test_domains.py +11 -18
- msfabricpysdkcore/tests/test_environments.py +34 -27
- msfabricpysdkcore/tests/test_evenstreams.py +31 -26
- msfabricpysdkcore/tests/test_eventhouses.py +63 -0
- msfabricpysdkcore/tests/test_external_data_shares.py +2 -2
- msfabricpysdkcore/tests/test_fabric_azure_client.py +1 -1
- msfabricpysdkcore/tests/test_folders.py +9 -6
- msfabricpysdkcore/tests/test_gateways.py +3 -3
- msfabricpysdkcore/tests/test_git.py +1 -1
- msfabricpysdkcore/tests/test_items.py +2 -2
- msfabricpysdkcore/tests/test_jobs.py +6 -4
- msfabricpysdkcore/tests/test_kql_dashboards.py +3 -3
- msfabricpysdkcore/tests/test_kql_queryset.py +1 -5
- msfabricpysdkcore/tests/test_kqldatabases.py +3 -3
- msfabricpysdkcore/tests/test_lakehouse.py +6 -10
- msfabricpysdkcore/tests/test_managed_private_endpoints.py +13 -12
- msfabricpysdkcore/tests/test_mirrored_azuredatabricks_catalog.py +81 -0
- msfabricpysdkcore/tests/test_ml_experiments.py +1 -3
- msfabricpysdkcore/tests/test_ml_models.py +2 -3
- msfabricpysdkcore/tests/test_mounted_adf.py +1 -1
- msfabricpysdkcore/tests/test_notebooks.py +3 -2
- msfabricpysdkcore/tests/test_one_lake_data_access_security.py +2 -2
- msfabricpysdkcore/tests/test_other_items.py +2 -2
- msfabricpysdkcore/tests/test_reflex.py +1 -2
- msfabricpysdkcore/tests/test_semantic_model.py +1 -1
- msfabricpysdkcore/tests/test_shortcuts.py +3 -4
- msfabricpysdkcore/tests/test_spark.py +2 -2
- msfabricpysdkcore/tests/test_sparkjobdefinition.py +1 -1
- msfabricpysdkcore/tests/test_sql_endpoint.py +28 -0
- msfabricpysdkcore/tests/test_warehouses.py +1 -1
- msfabricpysdkcore/tests/test_workspaces_capacities.py +8 -8
- msfabricpysdkcore/workspace.py +180 -10
- {msfabricpysdkcore-0.2.6.dist-info → msfabricpysdkcore-0.2.8.dist-info}/METADATA +4 -2
- msfabricpysdkcore-0.2.8.dist-info/RECORD +78 -0
- msfabricpysdkcore/tests/test_deployment_pipeline.py +0 -63
- msfabricpysdkcore/tests/test_evenhouses.py +0 -56
- msfabricpysdkcore-0.2.6.dist-info/RECORD +0 -75
- {msfabricpysdkcore-0.2.6.dist-info → msfabricpysdkcore-0.2.8.dist-info}/WHEEL +0 -0
- {msfabricpysdkcore-0.2.6.dist-info → msfabricpysdkcore-0.2.8.dist-info}/licenses/LICENSE +0 -0
- {msfabricpysdkcore-0.2.6.dist-info → msfabricpysdkcore-0.2.8.dist-info}/top_level.txt +0 -0
@@ -12,8 +12,8 @@ class TestFabricClientCore(unittest.TestCase):
         super(TestFabricClientCore, self).__init__(*args, **kwargs)
         #load_dotenv()
         self.fc = FabricClientCore()
-        self.workspace_id = '
-        self.item_id = "
+        self.workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        self.item_id = "9a2edf0f-2318-4179-80d0-1002f3dae7b1"
 
 
     def test_jobs_end_to_end(self):
@@ -48,9 +48,8 @@ class TestFabricClientCore(unittest.TestCase):
 
         fc = self.fc
 
-        item_id = "42b6e090-24ff-4dc7-8c52-cdae0ddd2c06"
 
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         spark_job_definition_name = f"sjd{datetime_str}"
@@ -58,6 +57,7 @@ class TestFabricClientCore(unittest.TestCase):
         spark_job_definition_w_content = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")
         definition = fc.get_spark_job_definition_definition(workspace_id, spark_job_definition_w_content.id)
 
+
         self.assertIsNotNone(definition)
         self.assertIn("definition", definition)
         definition = definition["definition"]
@@ -82,6 +82,8 @@ class TestFabricClientCore(unittest.TestCase):
         schedule_new = spark_job_definition.update_item_schedule(schedule_id=schedule_id, job_type="sparkjob", configuration=configuration, enabled=False)
         self.assertIsNotNone(schedule_new)
 
+        item_id = spark_job_definition.id
+
         schedule_check = spark_job_definition.get_item_schedule(schedule_id=schedule_id, job_type="sparkjob")
         self.assertEqual(schedule_check["id"], schedule_id)
         self.assertFalse(schedule_check["enabled"])
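The jobs end-to-end test above exercises the item-schedule helpers on a spark job definition. A minimal standalone sketch of that flow follows; the workspace ID, the existing schedule ID, and the shape of the `configuration` dict are assumptions (see the Fabric job scheduler REST documentation for the exact schema), and `FabricClientCore` is assumed to pick up credentials from the environment.

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()  # assumption: credentials resolved from environment variables

workspace_id = "<workspace-guid>"          # placeholder
schedule_id = "<existing-schedule-guid>"   # placeholder: a schedule created beforehand

# Fetch the spark job definition item by display name, as the test does
sjd = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")

# Illustrative cron-style configuration payload (assumed shape, not from the diff)
configuration = {
    "type": "Cron",
    "startDateTime": "2025-01-01T00:00:00",
    "endDateTime": "2026-01-01T00:00:00",
    "localTimeZoneId": "UTC",
    "interval": 120,
}

# Disable the schedule, then read it back to confirm the change
updated = sjd.update_item_schedule(schedule_id=schedule_id, job_type="sparkjob",
                                   configuration=configuration, enabled=False)
check = sjd.get_item_schedule(schedule_id=schedule_id, job_type="sparkjob")
print(check["enabled"])  # expected: False
```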
@@ -17,9 +17,9 @@ class TestFabricClientCore(unittest.TestCase):
     def test_kql_dashboards(self):
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
-        kql_dash = fc.get_kql_dashboard(workspace_id, kql_dashboard_name='
+        kql_dash = fc.get_kql_dashboard(workspace_id, kql_dashboard_name='dashboard1')
         kql_dash_orig_id = kql_dash.id
 
 
@@ -41,7 +41,7 @@ class TestFabricClientCore(unittest.TestCase):
         kql_dash_names = [kqld.display_name for kqld in kql_dashs]
         self.assertGreater(len(kql_dashs), 0)
         self.assertIn(kql_dash_name, kql_dash_names)
-        self.assertIn('
+        self.assertIn('dashboard1', kql_dash_names)
 
         kql_dash2 = fc.get_kql_dashboard(workspace_id, kql_dashboard_name=kql_dash_name)
         self.assertIsNotNone(kql_dash2.id)
@@ -12,15 +12,11 @@ class TestFabricClientCore(unittest.TestCase):
         super(TestFabricClientCore, self).__init__(*args, **kwargs)
         #load_dotenv()
         self.fc = FabricClientCore()
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        self.item_name = "testitem" + datetime_str
-        self.item_type = "Notebook"
 
     def test_kql_querysets(self):
 
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
         kql_queryset_name = "kqlqueryset12"
         kqlq_w_content = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_name)
@@ -16,11 +16,11 @@ class TestFabricClientCore(unittest.TestCase):
     def test_kql_database(self):
 
         fc = self.fc
-        workspace_id = '
-
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        eventhouse_id = "71994015-66d8-4df2-b57d-46afe7440209"
 
         creation_payload = {"databaseType" : "ReadWrite",
-                            "parentEventhouseItemId" :
+                            "parentEventhouseItemId" : eventhouse_id}
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         kqldb_name = "kql" + datetime_str
@@ -13,21 +13,17 @@ class TestFabricClientCore(unittest.TestCase):
         #load_dotenv()
         self.fc = FabricClientCore()
 
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        self.item_name = "testitem" + datetime_str
-        self.item_type = "Notebook"
-
     def test_lakehouse(self):
 
         lakehouse2 = "lh2" + datetime.now().strftime("%Y%m%d%H%M%S")
         lakehouse3 = "lh3" + datetime.now().strftime("%Y%m%d%H%M%S")
 
-        workspace_id = '
-        lhs = self.fc.list_lakehouses(workspace_id
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        lhs = self.fc.list_lakehouses(workspace_id=workspace_id, with_properties=True)
         lh = [lh_ for lh_ in lhs if lh_.display_name == "lakelhousewlabels"][0]
         tables = lh.list_tables()
         table_names = [t["name"] for t in tables]
-        self.assertIn("
+        self.assertIn("dimension_customer", table_names)
 
         lakehouse = self.fc.get_item(workspace_id=workspace_id, item_name="lakelhousewlabels", item_type="Lakehouse")
         self.assertIsNotNone(lakehouse.properties)
@@ -37,19 +33,19 @@ class TestFabricClientCore(unittest.TestCase):
 
 
         status_code = self.fc.load_table(workspace_id=workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
-                                         path_type="File", relative_path="Files/to_share/
+                                         path_type="File", relative_path="Files/to_share/dimension_customer.csv")
 
         self.assertEqual(status_code, 202)
 
         # Run on demand table maintenance
-        table_name_maintenance = "
+        table_name_maintenance = "dimension_customer"
 
         execution_data = {
             "tableName": table_name_maintenance,
             "optimizeSettings": {
                 "vOrder": True,
                 "zOrderBy": [
-                    "
+                    "CustomerKey",
                 ]
             },
             "vacuumSettings": {
@@ -17,29 +17,30 @@ class TestFabricClientCore(unittest.TestCase):
 
         fc = self.fc
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
-        mpes = fc.list_workspace_managed_private_endpoints(workspace_id=
+        mpes = fc.list_workspace_managed_private_endpoints(workspace_id=workspace_id)
 
         if len(mpes) > 0:
 
             for mpe in mpes:
-                status_code = fc.delete_workspace_managed_private_endpoint(workspace_id=
+                status_code = fc.delete_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                                            managed_private_endpoint_id=mpe["id"])
                 self.assertEqual(status_code, 200)
             sleep(60)
 
-        mpe = fc.create_workspace_managed_private_endpoint(workspace_id=
-                                                           name
-                                                           target_private_link_resource_id
-                                                           target_subresource_type
-                                                           request_message
-
-        mpes = fc.list_workspace_managed_private_endpoints(workspace_id=
+        mpe = fc.create_workspace_managed_private_endpoint(workspace_id=workspace_id,
+                                                           name=f'testmpe{datetime_str}',
+                                                           target_private_link_resource_id='/subscriptions/c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8/resourceGroups/fabricdemo/providers/Microsoft.Storage/storageAccounts/publicfabricdemo9039',
+                                                           target_subresource_type='dfs',
+                                                           request_message='testmessage')
+
+        mpes = fc.list_workspace_managed_private_endpoints(workspace_id=workspace_id)
 
         self.assertIsNotNone(mpes)
         self.assertGreater(len(mpes), 0)
 
-        mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id=
+        mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                          managed_private_endpoint_id=mpe["id"])
 
         self.assertEqual(mpe2["id"], mpe["id"])
@@ -52,10 +53,10 @@ class TestFabricClientCore(unittest.TestCase):
             if mpe2["connectionState"]["status"] != "Pending":
                 sleep(30)
             else:
-                status_code = fc.delete_workspace_managed_private_endpoint(workspace_id=
+                status_code = fc.delete_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                                            managed_private_endpoint_id=mpe["id"])
                 self.assertEqual(status_code, 200)
                 break
-        mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id=
+        mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                          managed_private_endpoint_id=mpe["id"])
 
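The managed private endpoint hunks above pass the workspace ID and target resource explicitly to the create/get/delete helpers. A minimal sketch of that lifecycle outside the test, with placeholder IDs and credentials assumed to come from the environment:

```python
import time
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()  # assumption: credentials resolved from environment variables

workspace_id = "<workspace-guid>"                    # placeholder
target_resource_id = "<azure-storage-resource-id>"   # placeholder Azure resource ID

# Request a managed private endpoint against the storage account's 'dfs' sub-resource
mpe = fc.create_workspace_managed_private_endpoint(
    workspace_id=workspace_id,
    name="examplempe",
    target_private_link_resource_id=target_resource_id,
    target_subresource_type="dfs",
    request_message="example request",
)

# Provisioning is asynchronous; the connection state is exposed on the returned dict
mpe2 = fc.get_workspace_managed_private_endpoint(
    workspace_id=workspace_id, managed_private_endpoint_id=mpe["id"])
print(mpe2["connectionState"]["status"])  # e.g. "Pending" until approved on the Azure side

# Remove the endpoint again (the test sleeps between calls, so a delay may be needed)
time.sleep(60)
status_code = fc.delete_workspace_managed_private_endpoint(
    workspace_id=workspace_id, managed_private_endpoint_id=mpe["id"])
assert status_code == 200
```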
@@ -0,0 +1,81 @@
+import unittest
+from dotenv import load_dotenv
+from msfabricpysdkcore import FabricClientCore
+from datetime import datetime
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fcc = FabricClientCore()
+
+    def test_mirrored_azure_databricks_catalog(self):
+        fcc = self.fcc
+
+        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
+        item_id = "eb5a54af-f282-4612-97c1-95120620b5d3"
+        connection_id = "f7ac4f29-a70e-4868-87a1-9cdd92eacfa0"
+
+        catalog_name = "unitycatalogdbxsweden"
+        schema_name = "testinternal"
+        table_name = "internal_customer"
+
+        mirrored_azure_databricks_catalog = fcc.list_mirrored_azure_databricks_catalogs(workspace_id=workspace_id)
+        for mirrored_azure_databricks_catalog in mirrored_azure_databricks_catalog:
+            if mirrored_azure_databricks_catalog.id != item_id:
+                resp = fcc.delete_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog.id)
+                self.assertEqual(resp, 200)
+
+        mirrored_azure_databricks_catalog_definition = fcc.get_mirrored_azure_databricks_catalog_definition(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=item_id)
+        self.assertIn("definition", mirrored_azure_databricks_catalog_definition)
+        definition = mirrored_azure_databricks_catalog_definition["definition"]
+
+        date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
+        date_str = f"copyjob{date_str}"
+
+        mirrored_azure_databricks_catalog_new = fcc.create_mirrored_azure_databricks_catalog(workspace_id=workspace_id, display_name=date_str, definition=definition)
+
+        self.assertEqual(mirrored_azure_databricks_catalog_new.display_name, date_str)
+
+        mirrored_azure_databricks_catalog_get = fcc.get_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id)
+        self.assertEqual(mirrored_azure_databricks_catalog_get.display_name, date_str)
+
+        mirrored_azure_databricks_catalog = fcc.list_mirrored_azure_databricks_catalogs(workspace_id=workspace_id)
+        self.assertEqual(len(mirrored_azure_databricks_catalog), 2)
+
+        date_str_updated = date_str + "_updated"
+        mirrored_azure_databricks_catalog_updated = fcc.update_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id, display_name=date_str_updated, return_item=True)
+        self.assertEqual(mirrored_azure_databricks_catalog_updated.display_name, date_str_updated)
+
+        mirrored_azure_databricks_catalog_updated = fcc.update_mirrored_azure_databricks_catalog_definition(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id, definition=definition)
+        self.assertEqual(mirrored_azure_databricks_catalog_updated.status_code, 200)
+
+        for mirrored_azure_databricks_catalog in mirrored_azure_databricks_catalog:
+            if mirrored_azure_databricks_catalog.id != item_id:
+                resp = fcc.delete_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog.id)
+                self.assertEqual(resp, 200)
+
+        catalogs = fcc.discover_mirrored_azure_databricks_catalogs(workspace_id=workspace_id, databricks_workspace_connection_id=connection_id)
+        self.assertEqual(len([cat["name"] for cat in catalogs if cat["name"] == catalog_name]), 1)
+
+        schemas = fcc.discover_mirrored_azure_databricks_catalog_schemas(workspace_id=workspace_id, catalog_name=catalog_name, databricks_workspace_connection_id=connection_id)
+        self.assertEqual(len([cat["name"] for cat in schemas if cat["name"] == schema_name]), 1)
+
+        tables = fcc.discover_mirrored_azure_databricks_catalog_tables(workspace_id=workspace_id, catalog_name=catalog_name, schema_name=schema_name, databricks_workspace_connection_id=connection_id)
+        self.assertEqual(len([cat["name"] for cat in tables if cat["name"] == table_name]), 1)
+
+        status = fcc.refresh_mirrored_azure_databricks_catalog_metadata(workspace_id=workspace_id,
+                                                                        item_id= item_id, wait_for_completion=False)
+        self.assertEqual(status, 202)
+
+
+
+
+
+
+
+
+
+
+
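The new test file above covers the mirrored Azure Databricks catalog APIs added in this release. A condensed sketch of the discover-then-refresh flow, with placeholder IDs; `FabricClientCore` is assumed to authenticate from the environment:

```python
from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()  # assumption: credentials resolved from environment variables

workspace_id = "<workspace-guid>"                 # placeholder
connection_id = "<databricks-connection-guid>"    # placeholder Fabric connection to the Databricks workspace

# Walk the Unity Catalog hierarchy exposed through the Databricks workspace connection
catalogs = fcc.discover_mirrored_azure_databricks_catalogs(
    workspace_id=workspace_id, databricks_workspace_connection_id=connection_id)
catalog_name = catalogs[0]["name"]

schemas = fcc.discover_mirrored_azure_databricks_catalog_schemas(
    workspace_id=workspace_id, catalog_name=catalog_name,
    databricks_workspace_connection_id=connection_id)

tables = fcc.discover_mirrored_azure_databricks_catalog_tables(
    workspace_id=workspace_id, catalog_name=catalog_name,
    schema_name=schemas[0]["name"], databricks_workspace_connection_id=connection_id)

# Trigger a metadata refresh on an existing mirrored catalog item; 202 signals the
# long-running operation was accepted when wait_for_completion is False
status = fcc.refresh_mirrored_azure_databricks_catalog_metadata(
    workspace_id=workspace_id, item_id="<mirrored-catalog-item-guid>",
    wait_for_completion=False)
```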
@@ -14,14 +14,12 @@ class TestFabricClientCore(unittest.TestCase):
         self.fc = FabricClientCore()
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        self.item_name = "testitem" + datetime_str
-        self.item_type = "Notebook"
 
 
     def test_ml_experiments(self):
 
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
         mlexperiment_name = "mlexp" + datetime.now().strftime("%Y%m%d%H%M%S")
         mlexperiment_name2 = "mlexp2" + datetime.now().strftime("%Y%m%d%H%M%S")
 
@@ -14,14 +14,13 @@ class TestFabricClientCore(unittest.TestCase):
         self.fc = FabricClientCore()
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-
-        self.item_type = "Notebook"
+
 
 
     def test_ml_models(self):
 
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         model_name = "mlm" + datetime_str
 
@@ -19,7 +19,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_mounted_adf(self):
 
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
         definition = {'parts': [{'path': 'mountedDataFactory-content.json',
                                  'payload': 'ewogICJkYXRhRmFjdG9yeVJlc291cmNlSWQiOiAiL3N1YnNjcmlwdGlvbnMvYzc3Y2M4ZmMtNDNiYi00ZDQ0LWJkYzUtNmUyMDUxMWVkMmE4L3Jlc291cmNlR3JvdXBzL2ZhYnJpY2RlbW8vcHJvdmlkZXJzL01pY3Jvc29mdC5EYXRhRmFjdG9yeS9mYWN0b3JpZXMvZmFicmljYWRmMjAyNTAzMDYiCn0=',
@@ -21,11 +21,12 @@ class TestFabricClientCore(unittest.TestCase):
     def test_notebooks(self):
 
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        item_id = "9a2edf0f-2318-4179-80d0-1002f3dae7b1"
 
         notebook_name = "notebook" + datetime.now().strftime("%Y%m%d%H%M%S")
 
-        notebook_w_content = fc.get_notebook(workspace_id,
+        notebook_w_content = fc.get_notebook(workspace_id, notebook_id=item_id)
 
         definition = fc.get_notebook_definition(workspace_id, notebook_w_content.id)
 
@@ -14,8 +14,8 @@ class TestFabricClientCore(unittest.TestCase):
 
         fc = self.fc
 
-        workspace_id = '
-        item_id = "
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        item_id = "63023672-df30-4bfb-adce-f292beb357af"
 
         resp = fc.list_data_access_roles(workspace_id=workspace_id, item_id=item_id)
         self.assertEqual(len(resp), 2)
@@ -21,12 +21,12 @@ class TestFabricClientCore(unittest.TestCase):
 
         fc = self.fc
 
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
         list_dashboards = fc.list_dashboards(workspace_id)
         dashboard_names = [dashboard.display_name for dashboard in list_dashboards]
         self.assertGreater(len(list_dashboards), 0)
-        self.assertIn("
+        self.assertIn("dashboardpbi", dashboard_names)
 
         # list_datamarts = fc.list_datamarts(workspace_id)
         # datamart_names = [datamart.display_name for datamart in list_datamarts]
@@ -19,8 +19,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_reflex(self):
 
         fc = self.fc
-        workspace_id = '
-
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
 
         reflex_name = "reflex" + datetime_str
@@ -20,7 +20,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_semantic_models(self):
 
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         semantic_model_name = "semanticmodel" + datetime_str
 
@@ -11,10 +11,9 @@ class TestFabricClientCore(unittest.TestCase):
         super(TestFabricClientCore, self).__init__(*args, **kwargs)
         #load_dotenv()
         self.fc = FabricClientCore()
-        self.workspace_id = '
-
-        self.
-        self.lakehouse_shortcut = "9d1e8b07-2fea-4fc5-872c-fb586d085149"
+        self.workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
+        self.lakehouse_target = "63023672-df30-4bfb-adce-f292beb357af"
+        self.lakehouse_shortcut = "82c01e0c-4cee-4a62-9806-870699ced699"
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         self.shortcutname = "shortcut" + datetime_str
@@ -16,7 +16,7 @@ class TestFabricClientCore(unittest.TestCase):
 
     def test_spark_workspace_custom_pools(self):
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
         pool_name = "pool" + datetime.now().strftime("%Y%m%d%H%M%S")
         # List
 
@@ -63,7 +63,7 @@ class TestFabricClientCore(unittest.TestCase):
 
     def test_workspace_settings(self):
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
         # Get
 
@@ -14,7 +14,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_spark_job_definitions(self):
 
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         spark_job_definition_name = f"sjd{datetime_str}"
@@ -0,0 +1,28 @@
+import unittest
+from dotenv import load_dotenv
+from msfabricpysdkcore import FabricClientCore
+from datetime import datetime
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fcc = FabricClientCore()
+
+    def test_sql_endpoint(self):
+        fcc = self.fcc
+
+        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
+        item_id = "d21012a1-f306-4cf1-a21b-f8ae55c17642"
+
+        response = fcc.refresh_sql_endpoint_metadata(workspace_id=workspace_id, sql_endpoint_id=item_id, wait_for_completion=False)
+        self.assertIn(response.status_code, [200, 202])
+
+
+
+
+
+
+
+
+
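The new test file above exercises the SQL endpoint metadata refresh added in this release. A minimal standalone sketch, with placeholder IDs and credentials assumed to come from the environment:

```python
from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()  # assumption: credentials resolved from environment variables

workspace_id = "<workspace-guid>"        # placeholder
sql_endpoint_id = "<sql-endpoint-guid>"  # placeholder: the SQL analytics endpoint item ID

# Kick off a metadata refresh without blocking; the raw HTTP response is returned,
# so 202 indicates the refresh was accepted as a long-running operation
response = fcc.refresh_sql_endpoint_metadata(
    workspace_id=workspace_id, sql_endpoint_id=sql_endpoint_id,
    wait_for_completion=False)
print(response.status_code)  # 200 or 202, as asserted in the test above
```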
@@ -21,7 +21,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_warehouses(self):
 
         fc = self.fc
-        workspace_id = '
+        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         warehouse1 = f"wh{datetime_str}"
@@ -32,7 +32,7 @@ class TestFabricClientCore(unittest.TestCase):
         # def test_assign_to_capacity(self):
 
         result_status_code = fc.assign_to_capacity(workspace_id=ws.id,
-                                                   capacity_id="
+                                                   capacity_id="9e7e757d-d567-4fb3-bc4f-d230aabf2a00")
         self.assertEqual(result_status_code, 202)
 
 
@@ -44,7 +44,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         for ws in result:
             if ws.display_name == display_name:
-                self.assertEqual(ws.capacity_id, "
+                self.assertEqual(ws.capacity_id, "9e7e757d-d567-4fb3-bc4f-d230aabf2a00")
 
 
         # def test_get_workspace_by_name(self):
@@ -80,7 +80,7 @@ class TestFabricClientCore(unittest.TestCase):
         # Get get_workspace_role_assignment
 
         result = fc.get_workspace_role_assignment(workspace_id = ws.id,
-                                                  workspace_role_assignment_id = "
+                                                  workspace_role_assignment_id = user["id"])
 
         self.assertEqual(result["role"], "Member")
 
@@ -88,7 +88,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         result_status_code = fc.update_workspace_role_assignment(workspace_id = ws.id,
                                                                  role = "Contributor",
-                                                                 workspace_role_assignment_id=
+                                                                 workspace_role_assignment_id= user["id"])
 
         self.assertEqual(result_status_code, 200)
 
@@ -100,7 +100,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         # def test_delete_role_assignment(self):
         result_status_code = fc.delete_workspace_role_assignment(workspace_id = ws.id,
-                                                                 workspace_role_assignment_id = "
+                                                                 workspace_role_assignment_id = user["id"])
         self.assertEqual(result_status_code, 200)
 
         # def test_get_workspace_role_assignments(self):
@@ -141,11 +141,11 @@ class TestFabricClientCore(unittest.TestCase):
         result = self.fc.list_capacities()
         self.assertTrue(len(result) > 0)
         cap_ids = [cap.id for cap in result]
-        self.assertIn("
+        self.assertIn("9e7e757d-d567-4fb3-bc4f-d230aabf2a00", cap_ids)
 
     def test_get_capacity(self):
-        capacity = self.fc.get_capacity(capacity_id = "
-        self.assertEqual(capacity.id, "
+        capacity = self.fc.get_capacity(capacity_id = "9e7e757d-d567-4fb3-bc4f-d230aabf2a00")
+        self.assertEqual(capacity.id, "9e7e757d-d567-4fb3-bc4f-d230aabf2a00")
 
         cap = self.fc.get_capacity(capacity_name= capacity.display_name)
 
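The workspace and capacity test hunks above now pin a concrete capacity ID. Outside the tests, the same calls can be combined to look up a capacity and assign a workspace to it; the IDs below are placeholders and credentials are assumed to come from the environment:

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()  # assumption: credentials resolved from environment variables

# Enumerate capacities visible to the caller and look one up by ID or display name
capacities = fc.list_capacities()
capacity = fc.get_capacity(capacity_id=capacities[0].id)
same_capacity = fc.get_capacity(capacity_name=capacity.display_name)

# Assign a workspace to that capacity; 202 indicates the request was accepted
workspace_id = "<workspace-guid>"  # placeholder
status_code = fc.assign_to_capacity(workspace_id=workspace_id, capacity_id=capacity.id)
print(status_code)  # expected: 202
```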