msfabricpysdkcore 0.2.7__py3-none-any.whl → 0.2.8__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
- msfabricpysdkcore/coreapi.py +6 -6
- msfabricpysdkcore/tests/__init__.py +0 -0
- msfabricpysdkcore/tests/test_admin_apis.py +174 -0
- msfabricpysdkcore/tests/test_admin_tags.py +46 -0
- msfabricpysdkcore/tests/test_apache_airflow_job.py +60 -0
- msfabricpysdkcore/tests/test_connection.py +111 -0
- msfabricpysdkcore/tests/test_copy_jobs.py +60 -0
- msfabricpysdkcore/tests/test_dataflows.py +60 -0
- msfabricpysdkcore/tests/test_datapipelines.py +60 -0
- msfabricpysdkcore/tests/test_deployment_pipelinev2.py +135 -0
- msfabricpysdkcore/tests/test_digital_twin_builder.py +60 -0
- msfabricpysdkcore/tests/test_domains.py +119 -0
- msfabricpysdkcore/tests/test_environments.py +121 -0
- msfabricpysdkcore/tests/test_evenstreams.py +57 -0
- msfabricpysdkcore/tests/test_eventhouses.py +63 -0
- msfabricpysdkcore/tests/test_eventstream_topology.py +82 -0
- msfabricpysdkcore/tests/test_external_data_shares.py +51 -0
- msfabricpysdkcore/tests/test_fabric_azure_client.py +80 -0
- msfabricpysdkcore/tests/test_folders.py +56 -0
- msfabricpysdkcore/tests/test_gateways.py +99 -0
- msfabricpysdkcore/tests/test_git.py +66 -0
- msfabricpysdkcore/tests/test_graphqlapi.py +44 -0
- msfabricpysdkcore/tests/test_items.py +97 -0
- msfabricpysdkcore/tests/test_jobs.py +98 -0
- msfabricpysdkcore/tests/test_kql_dashboards.py +63 -0
- msfabricpysdkcore/tests/test_kql_queryset.py +60 -0
- msfabricpysdkcore/tests/test_kqldatabases.py +56 -0
- msfabricpysdkcore/tests/test_lakehouse.py +89 -0
- msfabricpysdkcore/tests/test_managed_private_endpoints.py +62 -0
- msfabricpysdkcore/tests/test_mirrored_azuredatabricks_catalog.py +81 -0
- msfabricpysdkcore/tests/test_mirroreddatabases.py +80 -0
- msfabricpysdkcore/tests/test_ml_experiments.py +45 -0
- msfabricpysdkcore/tests/test_ml_models.py +46 -0
- msfabricpysdkcore/tests/test_mounted_adf.py +64 -0
- msfabricpysdkcore/tests/test_notebooks.py +58 -0
- msfabricpysdkcore/tests/test_one_lake_data_access_security.py +63 -0
- msfabricpysdkcore/tests/test_other_items.py +45 -0
- msfabricpysdkcore/tests/test_reflex.py +56 -0
- msfabricpysdkcore/tests/test_reports.py +56 -0
- msfabricpysdkcore/tests/test_semantic_model.py +56 -0
- msfabricpysdkcore/tests/test_shortcuts.py +59 -0
- msfabricpysdkcore/tests/test_spark.py +91 -0
- msfabricpysdkcore/tests/test_sparkjobdefinition.py +55 -0
- msfabricpysdkcore/tests/test_sql_endpoint.py +28 -0
- msfabricpysdkcore/tests/test_sqldatabases.py +45 -0
- msfabricpysdkcore/tests/test_tags.py +28 -0
- msfabricpysdkcore/tests/test_variable_libary.py +61 -0
- msfabricpysdkcore/tests/test_warehouses.py +50 -0
- msfabricpysdkcore/tests/test_workspaces_capacities.py +159 -0
- msfabricpysdkcore/workspace.py +22 -4
- {msfabricpysdkcore-0.2.7.dist-info → msfabricpysdkcore-0.2.8.dist-info}/METADATA +1 -1
- msfabricpysdkcore-0.2.8.dist-info/RECORD +78 -0
- msfabricpysdkcore-0.2.7.dist-info/RECORD +0 -30
- {msfabricpysdkcore-0.2.7.dist-info → msfabricpysdkcore-0.2.8.dist-info}/WHEEL +0 -0
- {msfabricpysdkcore-0.2.7.dist-info → msfabricpysdkcore-0.2.8.dist-info}/licenses/LICENSE +0 -0
- {msfabricpysdkcore-0.2.7.dist-info → msfabricpysdkcore-0.2.8.dist-info}/top_level.txt +0 -0
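
Most of this release is a new test suite under msfabricpysdkcore/tests, reproduced in the hunks below. As a point of reference, the sketch that follows (not part of the package) shows one way the new modules could be discovered and run with the standard unittest loader; it assumes Fabric credentials are available to FabricClientCore, for example through a .env file, since each test module calls load_dotenv(), and note that the tests perform real calls against live Fabric workspaces.

import unittest

# Discover the test modules added in this release and run them.
# Running them performs real calls against Microsoft Fabric.
suite = unittest.defaultTestLoader.discover("msfabricpysdkcore/tests", pattern="test_*.py")
unittest.TextTestRunner(verbosity=2).run(suite)
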
msfabricpysdkcore/tests/test_kql_queryset.py (new file)
@@ -0,0 +1,60 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()

    def test_kql_querysets(self):

        fc = self.fc
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'

        kql_queryset_name = "kqlqueryset12"
        kqlq_w_content = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_name)

        definition = fc.get_kql_queryset_definition(workspace_id, kqlq_w_content.id)
        self.assertIsNotNone(definition)
        self.assertIn("definition", definition)
        definition = definition["definition"]

        self.assertIsNotNone(kqlq_w_content.id)
        self.assertEqual(kqlq_w_content.display_name, kql_queryset_name)

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        kql_queryset_new = "kqlq" + datetime_str

        kqlq = fc.create_kql_queryset(workspace_id, definition=definition, display_name=kql_queryset_new)
        self.assertIsNotNone(kqlq.id)
        self.assertEqual(kqlq.display_name, kql_queryset_new)

        fc.update_kql_queryset_definition(workspace_id, kqlq.id, definition=definition)
        kqlq = fc.get_kql_queryset(workspace_id, kqlq.id)
        self.assertEqual(kqlq.display_name, kql_queryset_new)
        self.assertIsNotNone(kqlq.definition)

        kqlqs = fc.list_kql_querysets(workspace_id)
        kqlq_names = [kql.display_name for kql in kqlqs]
        self.assertGreater(len(kqlqs), 0)
        self.assertIn(kql_queryset_new, kqlq_names)

        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_new)
        self.assertIsNotNone(kqlq.id)
        self.assertEqual(kqlq.display_name, kql_queryset_new)

        kqlq2 = fc.update_kql_queryset(workspace_id, kql_queryset_id=kqlq.id, display_name=f"{kql_queryset_new}2", return_item=True)

        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
        self.assertEqual(kqlq.display_name, f"{kql_queryset_new}2")
        self.assertEqual(kqlq.id, kqlq2.id)

        status_code = fc.delete_kql_queryset(workspace_id, kqlq.id)
        self.assertEqual(status_code, 200)

msfabricpysdkcore/tests/test_kqldatabases.py (new file)
@@ -0,0 +1,56 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()

    def test_kql_database(self):

        fc = self.fc
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
        eventhouse_id = "71994015-66d8-4df2-b57d-46afe7440209"

        creation_payload = {"databaseType": "ReadWrite",
                            "parentEventhouseItemId": eventhouse_id}

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        kqldb_name = "kql" + datetime_str
        kqldb = fc.create_kql_database(workspace_id=workspace_id, display_name=kqldb_name,
                                       creation_payload=creation_payload)
        self.assertEqual(kqldb.display_name, kqldb_name)

        kql_databases = fc.list_kql_databases(workspace_id)
        kql_database_names = [kqldb.display_name for kqldb in kql_databases]
        self.assertGreater(len(kql_databases), 0)
        self.assertIn(kqldb_name, kql_database_names)

        kqldb = fc.get_kql_database(workspace_id, kql_database_name=kqldb_name)
        self.assertIsNotNone(kqldb.id)
        self.assertEqual(kqldb.display_name, kqldb_name)

        new_name = kqldb_name + "2"
        kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name=new_name, return_item=True)

        kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
        self.assertEqual(kqldb.display_name, new_name)
        self.assertEqual(kqldb.id, kqldb2.id)

        response = fc.update_kql_database_definition(workspace_id, kqldb.id, kqldb.definition)
        self.assertIn(response.status_code, [200, 202])

        definition = fc.get_kql_database_definition(workspace_id, kql_database_id=kqldb.id)
        self.assertIn("definition", definition)
        self.assertIn("parts", definition["definition"])
        self.assertGreaterEqual(len(definition["definition"]["parts"]), 3)

        status_code = fc.delete_kql_database(workspace_id, kqldb.id)
        self.assertEqual(status_code, 200)

msfabricpysdkcore/tests/test_lakehouse.py (new file)
@@ -0,0 +1,89 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()

    def test_lakehouse(self):

        lakehouse2 = "lh2" + datetime.now().strftime("%Y%m%d%H%M%S")
        lakehouse3 = "lh3" + datetime.now().strftime("%Y%m%d%H%M%S")

        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
        lhs = self.fc.list_lakehouses(workspace_id=workspace_id, with_properties=True)
        lh = [lh_ for lh_ in lhs if lh_.display_name == "lakelhousewlabels"][0]
        tables = lh.list_tables()
        table_names = [t["name"] for t in tables]
        self.assertIn("dimension_customer", table_names)

        lakehouse = self.fc.get_item(workspace_id=workspace_id, item_name="lakelhousewlabels", item_type="Lakehouse")
        self.assertIsNotNone(lakehouse.properties)
        lakehouse_id = lakehouse.id
        date_str = datetime.now().strftime("%Y%m%d%H%M%S")
        table_name = f"table{date_str}"

        status_code = self.fc.load_table(workspace_id=workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
                                         path_type="File", relative_path="Files/to_share/dimension_customer.csv")

        self.assertEqual(status_code, 202)

        # Run on demand table maintenance
        table_name_maintenance = "dimension_customer"

        execution_data = {
            "tableName": table_name_maintenance,
            "optimizeSettings": {
                "vOrder": True,
                "zOrderBy": [
                    "CustomerKey",
                ]
            },
            "vacuumSettings": {
                "retentionPeriod": "7:01:00:00"
            }
        }

        response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
                                                           execution_data=execution_data,
                                                           job_type="TableMaintenance", wait_for_completion=False)
        self.assertIn(response.status_code, [200, 202])

        table_list = self.fc.list_tables(workspace_id=workspace_id, lakehouse_id=lakehouse_id)
        table_names = [table["name"] for table in table_list]

        self.assertIn(table_name, table_names)

        fc = self.fc

        lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name=lakehouse2)
        self.assertIsNotNone(lakehouse.id)

        lakehouses = fc.list_lakehouses(workspace_id)
        lakehouse_names = [lh.display_name for lh in lakehouses]
        self.assertGreater(len(lakehouse_names), 0)
        self.assertIn(lakehouse2, lakehouse_names)

        lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
        self.assertEqual(lakehouse.id, lakehouse2.id)

        sleep(20)
        lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name=lakehouse3, return_item=True)
        self.assertEqual(lakehouse2.display_name, lakehouse3)

        id = lakehouse2.id

        lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_name=lakehouse3)
        self.assertEqual(lakehouse2.id, id)

        status_code = fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
        self.assertEqual(status_code, 200)

msfabricpysdkcore/tests/test_managed_private_endpoints.py (new file)
@@ -0,0 +1,62 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()

    def test_workspace_managed_private_endpoints(self):

        fc = self.fc
        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'

        mpes = fc.list_workspace_managed_private_endpoints(workspace_id=workspace_id)

        if len(mpes) > 0:

            for mpe in mpes:
                status_code = fc.delete_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                                           managed_private_endpoint_id=mpe["id"])
                self.assertEqual(status_code, 200)
            sleep(60)

        mpe = fc.create_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                           name=f'testmpe{datetime_str}',
                                                           target_private_link_resource_id='/subscriptions/c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8/resourceGroups/fabricdemo/providers/Microsoft.Storage/storageAccounts/publicfabricdemo9039',
                                                           target_subresource_type='dfs',
                                                           request_message='testmessage')

        mpes = fc.list_workspace_managed_private_endpoints(workspace_id=workspace_id)

        self.assertIsNotNone(mpes)
        self.assertGreater(len(mpes), 0)

        mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                         managed_private_endpoint_id=mpe["id"])

        self.assertEqual(mpe2["id"], mpe["id"])

        self.assertIsNotNone(mpe2["connectionState"])
        self.assertIn("targetPrivateLinkResourceId", mpe2)
        self.assertEqual(mpe2["targetPrivateLinkResourceId"], "/subscriptions/c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8/resourceGroups/fabricdemo/providers/Microsoft.Storage/storageAccounts/publicfabricdemo9039")

        for _ in range(0, 20):
            if mpe2["connectionState"]["status"] != "Pending":
                sleep(30)
            else:
                status_code = fc.delete_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                                           managed_private_endpoint_id=mpe["id"])
                self.assertEqual(status_code, 200)
                break
            mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id=workspace_id,
                                                             managed_private_endpoint_id=mpe["id"])

msfabricpysdkcore/tests/test_mirrored_azuredatabricks_catalog.py (new file)
@@ -0,0 +1,81 @@
import unittest
from dotenv import load_dotenv
from msfabricpysdkcore import FabricClientCore
from datetime import datetime
load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fcc = FabricClientCore()

    def test_mirrored_azure_databricks_catalog(self):
        fcc = self.fcc

        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
        item_id = "eb5a54af-f282-4612-97c1-95120620b5d3"
        connection_id = "f7ac4f29-a70e-4868-87a1-9cdd92eacfa0"

        catalog_name = "unitycatalogdbxsweden"
        schema_name = "testinternal"
        table_name = "internal_customer"

        mirrored_azure_databricks_catalog = fcc.list_mirrored_azure_databricks_catalogs(workspace_id=workspace_id)
        for mirrored_azure_databricks_catalog in mirrored_azure_databricks_catalog:
            if mirrored_azure_databricks_catalog.id != item_id:
                resp = fcc.delete_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog.id)
                self.assertEqual(resp, 200)

        mirrored_azure_databricks_catalog_definition = fcc.get_mirrored_azure_databricks_catalog_definition(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=item_id)
        self.assertIn("definition", mirrored_azure_databricks_catalog_definition)
        definition = mirrored_azure_databricks_catalog_definition["definition"]

        date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
        date_str = f"copyjob{date_str}"

        mirrored_azure_databricks_catalog_new = fcc.create_mirrored_azure_databricks_catalog(workspace_id=workspace_id, display_name=date_str, definition=definition)

        self.assertEqual(mirrored_azure_databricks_catalog_new.display_name, date_str)

        mirrored_azure_databricks_catalog_get = fcc.get_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id)
        self.assertEqual(mirrored_azure_databricks_catalog_get.display_name, date_str)

        mirrored_azure_databricks_catalog = fcc.list_mirrored_azure_databricks_catalogs(workspace_id=workspace_id)
        self.assertEqual(len(mirrored_azure_databricks_catalog), 2)

        date_str_updated = date_str + "_updated"
        mirrored_azure_databricks_catalog_updated = fcc.update_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id, display_name=date_str_updated, return_item=True)
        self.assertEqual(mirrored_azure_databricks_catalog_updated.display_name, date_str_updated)

        mirrored_azure_databricks_catalog_updated = fcc.update_mirrored_azure_databricks_catalog_definition(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id, definition=definition)
        self.assertEqual(mirrored_azure_databricks_catalog_updated.status_code, 200)

        for mirrored_azure_databricks_catalog in mirrored_azure_databricks_catalog:
            if mirrored_azure_databricks_catalog.id != item_id:
                resp = fcc.delete_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog.id)
                self.assertEqual(resp, 200)

        catalogs = fcc.discover_mirrored_azure_databricks_catalogs(workspace_id=workspace_id, databricks_workspace_connection_id=connection_id)
        self.assertEqual(len([cat["name"] for cat in catalogs if cat["name"] == catalog_name]), 1)

        schemas = fcc.discover_mirrored_azure_databricks_catalog_schemas(workspace_id=workspace_id, catalog_name=catalog_name, databricks_workspace_connection_id=connection_id)
        self.assertEqual(len([cat["name"] for cat in schemas if cat["name"] == schema_name]), 1)

        tables = fcc.discover_mirrored_azure_databricks_catalog_tables(workspace_id=workspace_id, catalog_name=catalog_name, schema_name=schema_name, databricks_workspace_connection_id=connection_id)
        self.assertEqual(len([cat["name"] for cat in tables if cat["name"] == table_name]), 1)

        status = fcc.refresh_mirrored_azure_databricks_catalog_metadata(workspace_id=workspace_id,
                                                                        item_id=item_id, wait_for_completion=False)
        self.assertEqual(status, 202)

msfabricpysdkcore/tests/test_mirroreddatabases.py (new file)
@@ -0,0 +1,80 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()

    def test_mirrored_database(self):

        fc = self.fc
        workspace_id = '46425c13-5736-4285-972c-6d034020f3ff'
        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")

        mirrored_db_name = "mirrored_db" + datetime_str

        # mirrored_db_w_content = fc.get_mirrored_database(workspace_id, mirrored_database_name="dfsdemo")

        # status = fc.get_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
        # self.assertIsNotNone(status)
        # self.assertIn("status", status)

        # status = status["status"]

        # if status == 'Running':
        #     fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
        #     sleep(60)
        #     fc.start_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
        # else:
        #     fc.start_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
        #     sleep(60)
        #     fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)

        # table_status = fc.get_tables_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)

        # self.assertIsNotNone(table_status)
        # self.assertIn("data", table_status)
        # for _ in range(5):
        #     if len(table_status["data"]) > 0:
        #         break
        #     sleep(60)
        #     table_status = fc.get_tables_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
        # self.assertIn("sourceTableName", table_status["data"][0])

        # fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)

        # definition = fc.get_mirrored_database_definition(workspace_id, mirrored_db_w_content.id)
        # self.assertIsNotNone(definition)
        # self.assertIn("definition", definition)
        # self.assertIn("parts", definition["definition"])

        # mirrored_db = fc.create_mirrored_database(workspace_id, display_name=mirrored_db_name)

        # mirrored_db_check = fc.get_mirrored_database(workspace_id, mirrored_database_id=mirrored_db.id)
        # self.assertEqual(mirrored_db_check.display_name, mirrored_db_name)
        # self.assertIsNotNone(mirrored_db_check.id)
        # self.assertEqual(mirrored_db_check.id, mirrored_db_check.id)

        # mirrored_dbs = fc.list_mirrored_databases(workspace_id)
        # mirrored_db_names = [md.display_name for md in mirrored_dbs]
        # self.assertGreater(len(mirrored_dbs), 0)
        # self.assertIn(mirrored_db_name, mirrored_db_names)

        # sleep(60)

        # mirrored_db_2 = fc.update_mirrored_database(workspace_id, mirrored_db_check.id,
        #                                             display_name=f"u{mirrored_db_name}", return_item=True)
        # mirrored_db_2 = fc.get_mirrored_database(workspace_id, mirrored_database_id=mirrored_db_2.id)

        # self.assertEqual(mirrored_db_2.display_name, f"u{mirrored_db_name}")

        # status_code = fc.delete_mirrored_database(workspace_id, mirrored_db_2.id)
        # self.assertEqual(status_code, 200)

msfabricpysdkcore/tests/test_ml_experiments.py (new file)
@@ -0,0 +1,45 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")


    def test_ml_experiments(self):

        fc = self.fc
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
        mlexperiment_name = "mlexp" + datetime.now().strftime("%Y%m%d%H%M%S")
        mlexperiment_name2 = "mlexp2" + datetime.now().strftime("%Y%m%d%H%M%S")

        ml_experiment = fc.create_ml_experiment(workspace_id, display_name=mlexperiment_name)
        self.assertEqual(ml_experiment.display_name, mlexperiment_name)

        ml_experiments = fc.list_ml_experiments(workspace_id)
        ml_experiment_names = [mle.display_name for mle in ml_experiments]
        self.assertGreater(len(ml_experiments), 0)
        self.assertIn(mlexperiment_name, ml_experiment_names)

        mle = fc.get_ml_experiment(workspace_id, ml_experiment_name=mlexperiment_name)
        self.assertIsNotNone(mle.id)
        self.assertEqual(mle.display_name, mlexperiment_name)

        mle2 = fc.update_ml_experiment(workspace_id, mle.id, display_name=mlexperiment_name2, return_item=True)

        mle = fc.get_ml_experiment(workspace_id, ml_experiment_id=mle.id)
        self.assertEqual(mle.display_name, mlexperiment_name2)
        self.assertEqual(mle.id, mle2.id)

        status_code = fc.delete_ml_experiment(workspace_id, mle.id)
        self.assertEqual(status_code, 200)

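The item tests in this release follow the same create/list/get/update/delete flow, with the update calls passing return_item=True to get the updated item back. Below is a minimal sketch of that flow using the ML-experiment methods exercised above; the workspace ID is a placeholder, and whether return_item is new in 0.2.8 is not stated by this diff.

from msfabricpysdkcore.coreapi import FabricClientCore

fc = FabricClientCore()               # credentials resolved from the environment
workspace_id = "<your-workspace-id>"  # placeholder, not a value from the diff

exp = fc.create_ml_experiment(workspace_id, display_name="demo_experiment")
names = [e.display_name for e in fc.list_ml_experiments(workspace_id)]
assert "demo_experiment" in names

exp = fc.get_ml_experiment(workspace_id, ml_experiment_name="demo_experiment")
# return_item=True makes the update call return the refreshed item object.
exp2 = fc.update_ml_experiment(workspace_id, exp.id, display_name="demo_experiment_renamed",
                               return_item=True)
status_code = fc.delete_ml_experiment(workspace_id, exp2.id)
assert status_code == 200
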
msfabricpysdkcore/tests/test_ml_models.py (new file)
@@ -0,0 +1,46 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")



    def test_ml_models(self):

        fc = self.fc
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        model_name = "mlm" + datetime_str

        ml_model = fc.create_ml_model(workspace_id, display_name=model_name)
        self.assertEqual(ml_model.display_name, model_name)

        ml_models = fc.list_ml_models(workspace_id)
        ml_model_names = [ml.display_name for ml in ml_models]
        self.assertGreater(len(ml_models), 0)
        self.assertIn(model_name, ml_model_names)

        mlm = fc.get_ml_model(workspace_id, ml_model_name=model_name)
        self.assertIsNotNone(mlm.id)
        self.assertEqual(mlm.display_name, model_name)

        mlm2 = fc.update_ml_model(workspace_id=workspace_id, ml_model_id=mlm.id, description=model_name, return_item=True)

        mlm = fc.get_ml_model(workspace_id, ml_model_id=mlm.id)
        self.assertEqual(mlm.description, model_name)
        self.assertEqual(mlm.id, mlm2.id)

        status_code = fc.delete_ml_model(workspace_id, mlm.id)
        self.assertEqual(status_code, 200)

msfabricpysdkcore/tests/test_mounted_adf.py (new file)
@@ -0,0 +1,64 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

# class TestFabricClientCore(unittest.TestCase):

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()


    def test_mounted_adf(self):

        fc = self.fc
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'

        definition = {'parts': [{'path': 'mountedDataFactory-content.json',
                                 'payload': 'ewogICJkYXRhRmFjdG9yeVJlc291cmNlSWQiOiAiL3N1YnNjcmlwdGlvbnMvYzc3Y2M4ZmMtNDNiYi00ZDQ0LWJkYzUtNmUyMDUxMWVkMmE4L3Jlc291cmNlR3JvdXBzL2ZhYnJpY2RlbW8vcHJvdmlkZXJzL01pY3Jvc29mdC5EYXRhRmFjdG9yeS9mYWN0b3JpZXMvZmFicmljYWRmMjAyNTAzMDYiCn0=',
                                 'payloadType': 'InlineBase64'},
                                {'path': '.platform',
                                 'payload': 'ewogICIkc2NoZW1hIjogImh0dHBzOi8vZGV2ZWxvcGVyLm1pY3Jvc29mdC5jb20vanNvbi1zY2hlbWFzL2ZhYnJpYy9naXRJbnRlZ3JhdGlvbi9wbGF0Zm9ybVByb3BlcnRpZXMvMi4wLjAvc2NoZW1hLmpzb24iLAogICJtZXRhZGF0YSI6IHsKICAgICJ0eXBlIjogIk1vdW50ZWREYXRhRmFjdG9yeSIsCiAgICAiZGlzcGxheU5hbWUiOiAiZmFicmljYWRmMjAyNTAzMDYiCiAgfSwKICAiY29uZmlnIjogewogICAgInZlcnNpb24iOiAiMi4wIiwKICAgICJsb2dpY2FsSWQiOiAiMDAwMDAwMDAtMDAwMC0wMDAwLTAwMDAtMDAwMDAwMDAwMDAwIgogIH0KfQ==',
                                 'payloadType': 'InlineBase64'}]}

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")

        mounted_adf_name = "mounted_adf" + datetime_str
        mounted_adf = fc.create_mounted_data_factory(workspace_id,
                                                     display_name=mounted_adf_name,
                                                     definition=definition)

        self.assertEqual(mounted_adf.display_name, mounted_adf_name)

        mounted_adfs = fc.list_mounted_data_factories(workspace_id)
        mounted_adf_names = [adf.display_name for adf in mounted_adfs]
        self.assertGreater(len(mounted_adfs), 0)
        self.assertIn(mounted_adf_name, mounted_adf_names)

        adf = fc.get_mounted_data_factory(workspace_id, mounted_data_factory_name=mounted_adf_name)
        self.assertIsNotNone(adf.id)
        self.assertEqual(adf.display_name, mounted_adf_name)

        adf2 = fc.update_mounted_data_factory(workspace_id, adf.id, display_name=f"{mounted_adf_name}2", return_item=True)

        adf = fc.get_mounted_data_factory(workspace_id, mounted_data_factory_id=adf.id)
        self.assertEqual(adf.display_name, f"{mounted_adf_name}2")
        self.assertEqual(adf.id, adf2.id)

        response = fc.update_mounted_data_factory_definition(workspace_id, mounted_data_factory_id=adf.id, definition=adf.definition)
        self.assertIn(response.status_code, [200, 202])

        definition = fc.get_mounted_data_factory_definition(workspace_id, mounted_data_factory_id=adf.id)
        self.assertIn("definition", definition)
        self.assertIn("parts", definition["definition"])
        self.assertGreaterEqual(len(definition["definition"]["parts"]), 2)

        status_code = fc.delete_mounted_data_factory(workspace_id, adf.id)
        self.assertEqual(status_code, 200)

msfabricpysdkcore/tests/test_notebooks.py (new file)
@@ -0,0 +1,58 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()

class TestFabricClientCore(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        #load_dotenv()
        self.fc = FabricClientCore()

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        self.item_name = "testitem" + datetime_str
        self.item_type = "Notebook"


    def test_notebooks(self):

        fc = self.fc
        workspace_id = '05bc5baa-ef02-4a31-ab20-158a478151d3'
        item_id = "9a2edf0f-2318-4179-80d0-1002f3dae7b1"

        notebook_name = "notebook" + datetime.now().strftime("%Y%m%d%H%M%S")

        notebook_w_content = fc.get_notebook(workspace_id, notebook_id=item_id)

        definition = fc.get_notebook_definition(workspace_id, notebook_w_content.id)

        self.assertIsNotNone(definition)
        self.assertIn("definition", definition)
        definition = definition["definition"]
        notebook = fc.create_notebook(workspace_id, definition=definition, display_name=notebook_name)
        fc.update_notebook_definition(workspace_id, notebook.id, definition=definition)
        notebook = fc.get_notebook(workspace_id, notebook_id=notebook.id)
        self.assertEqual(notebook.display_name, notebook_name)
        self.assertIsNotNone(notebook.definition)

        notebooks = fc.list_notebooks(workspace_id)
        notebook_names = [nb.display_name for nb in notebooks]
        self.assertGreater(len(notebooks), 0)
        self.assertIn(notebook_name, notebook_names)

        nb = fc.get_notebook(workspace_id, notebook_name=notebook_name)
        self.assertIsNotNone(nb.id)
        self.assertEqual(nb.display_name, notebook_name)

        nb2 = fc.update_notebook(workspace_id, notebook_id=nb.id, display_name=f"{notebook_name}2", return_item=True)

        nb = fc.get_notebook(workspace_id, notebook_id=nb.id)
        self.assertEqual(nb.display_name, f"{notebook_name}2")
        self.assertEqual(nb.id, nb2.id)

        status_code = fc.delete_notebook(workspace_id, nb.id)
        self.assertEqual(status_code, 200)