msfabricpysdkcore 0.1.8__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49):
  1. msfabricpysdkcore/adminapi.py +178 -12
  2. msfabricpysdkcore/coreapi.py +1479 -99
  3. msfabricpysdkcore/item.py +45 -6
  4. msfabricpysdkcore/job_instance.py +2 -1
  5. msfabricpysdkcore/otheritems.py +159 -10
  6. msfabricpysdkcore/tests/__init__.py +0 -0
  7. msfabricpysdkcore/tests/test_admin_apis.py +174 -0
  8. msfabricpysdkcore/tests/test_connection.py +111 -0
  9. msfabricpysdkcore/tests/test_datapipelines.py +45 -0
  10. msfabricpysdkcore/tests/test_deployment_pipeline.py +63 -0
  11. msfabricpysdkcore/tests/test_domains.py +126 -0
  12. msfabricpysdkcore/tests/test_environments.py +114 -0
  13. msfabricpysdkcore/tests/test_evenhouses.py +56 -0
  14. msfabricpysdkcore/tests/test_evenstreams.py +52 -0
  15. msfabricpysdkcore/tests/test_external_data_shares.py +51 -0
  16. msfabricpysdkcore/tests/test_fabric_azure_client.py +80 -0
  17. msfabricpysdkcore/tests/test_gateways.py +99 -0
  18. msfabricpysdkcore/tests/test_git.py +66 -0
  19. msfabricpysdkcore/tests/test_graphqlapi.py +44 -0
  20. msfabricpysdkcore/tests/test_items.py +97 -0
  21. msfabricpysdkcore/tests/test_jobs.py +96 -0
  22. msfabricpysdkcore/tests/test_kql_dashboards.py +63 -0
  23. msfabricpysdkcore/tests/test_kql_queryset.py +64 -0
  24. msfabricpysdkcore/tests/test_kqldatabases.py +56 -0
  25. msfabricpysdkcore/tests/test_lakehouse.py +93 -0
  26. msfabricpysdkcore/tests/test_managed_private_endpoints.py +61 -0
  27. msfabricpysdkcore/tests/test_mirroreddatabases.py +80 -0
  28. msfabricpysdkcore/tests/test_ml_experiments.py +47 -0
  29. msfabricpysdkcore/tests/test_ml_models.py +47 -0
  30. msfabricpysdkcore/tests/test_mounted_adf.py +64 -0
  31. msfabricpysdkcore/tests/test_notebooks.py +57 -0
  32. msfabricpysdkcore/tests/test_one_lake_data_access_security.py +63 -0
  33. msfabricpysdkcore/tests/test_other_items.py +45 -0
  34. msfabricpysdkcore/tests/test_reflex.py +57 -0
  35. msfabricpysdkcore/tests/test_reports.py +56 -0
  36. msfabricpysdkcore/tests/test_semantic_model.py +56 -0
  37. msfabricpysdkcore/tests/test_shortcuts.py +60 -0
  38. msfabricpysdkcore/tests/test_spark.py +91 -0
  39. msfabricpysdkcore/tests/test_sparkjobdefinition.py +55 -0
  40. msfabricpysdkcore/tests/test_sqldatabases.py +45 -0
  41. msfabricpysdkcore/tests/test_warehouses.py +50 -0
  42. msfabricpysdkcore/tests/test_workspaces_capacities.py +159 -0
  43. msfabricpysdkcore/workspace.py +295 -16
  44. {msfabricpysdkcore-0.1.8.dist-info → msfabricpysdkcore-0.2.2.dist-info}/METADATA +261 -16
  45. msfabricpysdkcore-0.2.2.dist-info/RECORD +65 -0
  46. {msfabricpysdkcore-0.1.8.dist-info → msfabricpysdkcore-0.2.2.dist-info}/WHEEL +1 -1
  47. msfabricpysdkcore-0.1.8.dist-info/RECORD +0 -28
  48. {msfabricpysdkcore-0.1.8.dist-info → msfabricpysdkcore-0.2.2.dist-info}/LICENSE +0 -0
  49. {msfabricpysdkcore-0.1.8.dist-info → msfabricpysdkcore-0.2.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,99 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end tests for the gateway APIs of FabricClientCore.

    Requires credentials in the environment (.env) and a tenant that already
    contains exactly one on-premises gateway (identified by "publicKey").
    """

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_gateways(self):

        fc = self.fc

        gateways = fc.list_gateways()

        # Remove leftover virtual-network gateways from earlier runs and
        # remember the pre-existing on-premises gateway: only on-premises
        # gateways carry a "publicKey" in the list response.
        gw_id = None
        for gw in gateways:
            if "publicKey" not in gw:
                fc.delete_gateway(gw['id'])
            else:
                gw_id = gw['id']
        # NOTE(review): the member tests below assume such a gateway exists;
        # gw_id stays None otherwise and would fail fast there.

        datetime_str = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
        display_name = 'fabricvnet-' + datetime_str
        gwr = {'displayName': display_name,
               'capacityId': '339c785f-4489-46ae-a649-e049e7610479',
               'virtualNetworkAzureResource': {'virtualNetworkName': 'fabricvnet',
                                               'subnetName': 'default3',
                                               'resourceGroupName': 'fabricdemo',
                                               'subscriptionId': 'c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8'},
               'inactivityMinutesBeforeSleep': 30,
               'numberOfMemberGateways': 2,
               'type': 'VirtualNetwork'}

        # Create a virtual-network gateway and verify it appears in the list.
        gw = fc.create_gateway(gwr)
        self.assertEqual(gw['displayName'], gwr['displayName'])

        gateways = fc.list_gateways()
        self.assertEqual(len(gateways), 2)

        gateways = [g for g in gateways if g['displayName'] == display_name]
        gw = gateways[0]

        # Role assignments on the freshly created virtual-network gateway.
        ras = fc.list_gateway_role_assignments(gw['id'])
        self.assertEqual(len(ras), 1)

        principal = {"id": "755f273c-98f8-408c-a886-691794938bd8",
                     "type": "ServicePrincipal"}

        new_ras = fc.add_gateway_role_assignment(gw['id'], principal, 'ConnectionCreator')
        self.assertIn("id", new_ras)
        self.assertEqual(2, len(fc.list_gateway_role_assignments(gw['id'])))

        new_ras = fc.update_gateway_role_assignment(gw['id'], new_ras['id'], 'Admin')
        self.assertEqual('Admin', new_ras['role'])

        new_ras_ = fc.get_gateway_role_assignment(gw['id'], new_ras['id'])
        self.assertEqual('Admin', new_ras_['role'])
        self.assertEqual(new_ras['id'], new_ras_['id'])

        resp_code = fc.delete_gateway_role_assignment(gw['id'], new_ras['id'])
        self.assertEqual(200, resp_code)
        self.assertEqual(1, len(fc.list_gateway_role_assignments(gw['id'])))

        # Members of the pre-existing on-premises gateway.
        gw_members = fc.list_gateway_members(gw_id)
        self.assertGreater(len(gw_members), 0)
        self.assertIn('id', gw_members[0])
        self.assertIn('displayName', gw_members[0])

        display_name_member = "surface_desktop" + datetime_str

        # Fix: the update must target the member's own id; the original code
        # passed the gateway id as gateway_member_id.
        gw_member = fc.update_gateway_member(gateway_id=gw_id,
                                             gateway_member_id=gw_members[0]['id'],
                                             display_name=display_name_member,
                                             enabled=True)
        self.assertEqual(display_name_member, gw_member['displayName'])

        gw_ = fc.get_gateway(gw["id"])
        self.assertEqual(display_name, gw_['displayName'])

        # Update the on-premises gateway's settings.
        gwr = {
            "type": "OnPremises",
            "displayName": display_name,
            "loadBalancingSetting": "Failover",
            "allowCloudConnectionRefresh": False,
            "allowCustomConnectors": False
        }

        gw_ = fc.update_gateway(gw_id, gwr)
        self.assertEqual(display_name, gw_['displayName'])

        # Clean up the virtual-network gateway; only the on-premises one remains.
        resp_code = fc.delete_gateway(gw["id"])
        self.assertEqual(200, resp_code)

        self.assertEqual(len(fc.list_gateways()), 1)
@@ -0,0 +1,66 @@
import unittest
from msfabricpysdkcore.coreapi import FabricClientCore
from datetime import datetime
from dotenv import load_dotenv

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end test of the Git integration APIs against Azure DevOps."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_git(self):

        # Fresh workspace bound to a capacity so Git operations are allowed.
        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        ws2_name = "git" + datetime_str
        self.fc.create_workspace(display_name=ws2_name)
        ws2 = self.fc.get_workspace_by_name(name=ws2_name)
        self.fc.assign_to_capacity(workspace_id=ws2.id,
                                   capacity_id="840a6c1e-5289-4094-bbc8-716daabaeeba")

        git_provider_details = {'organizationName': 'MngEnvMCAP065039',
                                'projectName': 'fabricdevops',
                                'gitProviderType': 'AzureDevOps',
                                'repositoryName': 'fabricdevops',
                                'branchName': 'main',
                                'directoryName': '/sdkdemoRTI'}

        # Connect and initialize, preferring the workspace content on conflicts.
        status_code = self.fc.git_connect(workspace_id=ws2.id,
                                          git_provider_details=git_provider_details)
        self.assertEqual(status_code, 200)

        status_code = self.fc.git_initialize_connection(workspace_id=ws2.id,
                                                        initialization_strategy="PreferWorkspace")
        self.assertEqual(status_code, 200)

        connection_details = self.fc.git_get_connection(workspace_id=ws2.id)
        self.assertEqual(connection_details['gitConnectionState'], 'ConnectedAndInitialized')

        status = self.fc.git_get_status(workspace_id=ws2.id)
        self.assertTrue(len(status["changes"]) > 0)

        git_credentials = self.fc.get_my_git_credentials('e624ffea-990e-482c-b27c-4ed5adae73c6')
        self.assertTrue(git_credentials["source"] == "Automatic")

        # Pull the remote state into the workspace (long-running -> 202).
        status_code = self.fc.update_from_git(workspace_id=ws2.id,
                                              remote_commit_hash=status["remoteCommitHash"])
        self.assertEqual(status_code, 202)

        # The repo contains a lakehouse named "blubb"; it must arrive via sync.
        blubb_lakehouse = any(item.type == "Lakehouse" and item.display_name == "blubb"
                              for item in ws2.list_items())
        self.assertTrue(blubb_lakehouse)

        status_code = self.fc.git_disconnect(workspace_id=ws2.id)
        self.assertEqual(status_code, 200)

        ws2.delete()


if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,44 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end CRUD tests for GraphQL API items."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_graphql_api(self):

        fc = self.fc
        workspace_id = '46425c13-5736-4285-972c-6d034020f3ff'

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")

        # Create.
        graph_ql = fc.create_graphql_api(workspace_id, display_name="graphql" + datetime_str)
        self.assertEqual(graph_ql.display_name, "graphql" + datetime_str)

        # List must contain the new item.
        graph_qls = fc.list_graphql_apis(workspace_id)
        graph_ql_names = [gql.display_name for gql in graph_qls]
        self.assertGreater(len(graph_qls), 0)
        self.assertIn("graphql" + datetime_str, graph_ql_names)

        # Get by name.
        gql = fc.get_graphql_api(workspace_id, graphql_api_name="graphql" + datetime_str)
        self.assertIsNotNone(gql.id)
        self.assertEqual(gql.display_name, "graphql" + datetime_str)

        # Update: with return_item=True the updated item is returned, so
        # assert on it directly (the original dropped this value unused).
        gql2 = fc.update_graphql_api(workspace_id, gql.id,
                                     display_name=f"graphql{datetime_str}2", return_item=True)
        self.assertEqual(gql2.display_name, f"graphql{datetime_str}2")

        gql = fc.get_graphql_api(workspace_id, graphql_api_id=gql.id)
        self.assertEqual(gql.display_name, f"graphql{datetime_str}2")

        # Delete.
        status_code = fc.delete_graphql_api(workspace_id, gql.id)
        self.assertEqual(status_code, 200)
@@ -0,0 +1,97 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end tests for generic item CRUD, item definitions and
    item connections."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()
        self.workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'

        # Unique item name per run to avoid collisions in the shared workspace.
        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        self.item_name = "testitem" + datetime_str
        self.item_type = "Notebook"

    def test_item_end_to_end(self):

        item = self.fc.create_item(display_name=self.item_name, type=self.item_type,
                                   workspace_id=self.workspace_id)
        self.assertEqual(item.display_name, self.item_name)
        self.assertEqual(item.type, self.item_type)
        self.assertEqual(item.workspace_id, self.workspace_id)
        self.assertEqual(item.description, "")

        # Get by id and by (name, type) must resolve to the same item.
        item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
        item_ = self.fc.get_item(workspace_id=self.workspace_id,
                                 item_name=self.item_name, item_type=self.item_type)
        self.assertEqual(item.id, item_.id)
        self.assertEqual(item.display_name, self.item_name)
        self.assertEqual(item.type, self.item_type)
        self.assertEqual(item.workspace_id, self.workspace_id)
        self.assertEqual(item.description, "")

        item_list = self.fc.list_items(workspace_id=self.workspace_id)
        self.assertTrue(len(item_list) > 0)

        item_ids = [item_.id for item_ in item_list]
        self.assertIn(item.id, item_ids)

        self.fc.update_item(workspace_id=self.workspace_id, item_id=item.id,
                            display_name=f"u{self.item_name}", return_item=True)
        item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
        self.assertEqual(item.display_name, f"u{self.item_name}")

        status_code = self.fc.delete_item(workspace_id=self.workspace_id, item_id=item.id)

        # Fix: an HTTP status code is an exact integer; assertAlmostEqual was
        # misleading (it is meant for float comparison).
        self.assertEqual(status_code, 200)

    def test_item_definition(self):

        # Use an existing SparkJobDefinition as the definition source.
        sjd = self.fc.get_item(workspace_id=self.workspace_id, item_name="helloworld",
                               item_type="SparkJobDefinition")
        self.assertIsNotNone(sjd.definition)
        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        blubb2 = "blubb2" + datetime_str
        blubb3 = "blubb3" + datetime_str

        # blubb2: created with a definition; blubb3: definition added later.
        blubb2 = self.fc.create_item(display_name=blubb2, type="SparkJobDefinition",
                                     workspace_id=self.workspace_id,
                                     definition=sjd.definition)

        blubb3 = self.fc.create_item(display_name=blubb3, type="SparkJobDefinition",
                                     workspace_id=self.workspace_id)

        response = self.fc.update_item_definition(workspace_id=self.workspace_id,
                                                  item_id=blubb3.id, definition=sjd.definition)

        self.assertEqual(response.status_code, 200)
        blubb3 = self.fc.get_item(workspace_id=self.workspace_id, item_id=blubb3.id)

        self.assertIn("parts", blubb3.definition)

        # blubb3 must carry the same definition parts as the source item.
        self.assertEqual(len(blubb3.definition["parts"]), len(sjd.definition["parts"]))
        sjd_definition = [part["path"] for part in sjd.definition["parts"]
                          if part["path"] == "SparkJobDefinitionV1.json"]
        blubb3_definition = [part["path"] for part in blubb3.definition["parts"]
                             if part["path"] == "SparkJobDefinitionV1.json"]
        self.assertEqual(sjd_definition, blubb3_definition)

        self.assertNotEqual(blubb2.id, sjd.id)
        self.assertIn("parts", blubb2.definition)

        # Same check for blubb2, which got the definition at creation time.
        self.assertEqual(len(blubb2.definition["parts"]), len(sjd.definition["parts"]))
        sjd_definition = [part["path"] for part in sjd.definition["parts"]
                          if part["path"] == "SparkJobDefinitionV1.json"]
        blubb2_definition = [part["path"] for part in blubb2.definition["parts"]
                             if part["path"] == "SparkJobDefinitionV1.json"]
        self.assertEqual(sjd_definition, blubb2_definition)
        self.assertNotEqual(blubb2.id, blubb3.id)

        blubb2.delete()
        blubb3.delete()

    def test_item_connections(self):

        fc = self.fc
        # This notebook has no connections; the list is expected to be empty.
        connections = fc.list_item_connections(workspace_id='63aa9e13-4912-4abe-9156-8a56e565b7a3',
                                               item_id='1bcc8b76-8e1f-428d-a594-f91ce1b9b076')
        self.assertEqual(len(connections), 0)


if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,96 @@
import unittest
from msfabricpysdkcore.coreapi import FabricClientCore
from datetime import datetime
from dotenv import load_dotenv

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end tests for job instances and item schedules."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()
        self.workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
        self.item_id = "38a1c15f-8a9e-49c5-8d05-a27cf9ce8b18"

    def test_jobs_end_to_end(self):
        # Start an on-demand notebook run and verify the returned instance.
        job = self.fc.run_on_demand_item_job(workspace_id=self.workspace_id,
                                             item_id=self.item_id,
                                             job_type="RunNotebook")

        self.assertEqual(job.item_id, self.item_id)
        self.assertEqual(job.workspace_id, self.workspace_id)
        self.assertEqual(job.job_type, "RunNotebook")
        self.assertIn(job.status, ["NotStarted", "InProgress", "Failed"])
        self.assertEqual(job.invoke_type, "Manual")

        # Fetch the same instance by id.
        job2 = self.fc.get_item_job_instance(workspace_id=self.workspace_id,
                                             item_id=self.item_id,
                                             job_instance_id=job.id)
        self.assertEqual(job.id, job2.id)

        # Cancel it; cancellation is asynchronous -> 202.
        status_code = self.fc.cancel_item_job_instance(workspace_id=self.workspace_id,
                                                       item_id=self.item_id,
                                                       job_instance_id=job.id)
        self.assertEqual(status_code, 202)

        job_instances = self.fc.list_item_job_instances(workspace_id=self.workspace_id,
                                                        item_id=self.item_id)
        self.assertGreaterEqual(len(job_instances), 1)

    def test_item_schedules(self):

        fc = self.fc

        item_id = "42b6e090-24ff-4dc7-8c52-cdae0ddd2c06"

        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        spark_job_definition_name = f"sjd{datetime_str}"

        # Clone the "helloworld" Spark job definition into a fresh item.
        spark_job_definition_w_content = fc.get_spark_job_definition(
            workspace_id, spark_job_definition_name="helloworld")
        definition = fc.get_spark_job_definition_definition(
            workspace_id, spark_job_definition_w_content.id)

        self.assertIsNotNone(definition)
        self.assertIn("definition", definition)
        definition = definition["definition"]

        spark_job_definition = fc.create_spark_job_definition(
            workspace_id, display_name=spark_job_definition_name, definition=definition)

        self.assertIsNotNone(spark_job_definition)

        configuration = {'type': 'Daily',
                         'startDateTime': '2024-11-21T00:00:00',
                         'endDateTime': '2028-11-08T23:59:00',
                         'localTimeZoneId': 'Romance Standard Time',
                         'times': ['15:39']}

        # Create, fetch, disable and re-fetch a daily schedule on the clone.
        schedule = spark_job_definition.create_item_schedule(
            job_type="sparkjob", configuration=configuration, enabled=True)

        schedule_id = schedule["id"]
        schedule_check = spark_job_definition.get_item_schedule(
            schedule_id=schedule_id, job_type="sparkjob")
        self.assertIsNotNone(schedule_check)
        self.assertEqual(schedule_check["id"], schedule_id)

        schedule_new = spark_job_definition.update_item_schedule(
            schedule_id=schedule_id, job_type="sparkjob",
            configuration=configuration, enabled=False)
        self.assertIsNotNone(schedule_new)

        schedule_check = spark_job_definition.get_item_schedule(
            schedule_id=schedule_id, job_type="sparkjob")
        self.assertEqual(schedule_check["id"], schedule_id)
        self.assertFalse(schedule_check["enabled"])

        # NOTE(review): this lists schedules of the hard-coded item_id above,
        # not of the freshly created spark_job_definition — confirm intent.
        list_schedules = fc.list_item_schedules(workspace_id, item_id, job_type="sparkjob")

        self.assertGreater(len(list_schedules), 0)

        spark_job_definition.delete()


if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,63 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end CRUD tests for KQL dashboards."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_kql_dashboards(self):
        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        fc = self.fc
        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'

        # The pre-existing sample dashboard supplies a known-good definition.
        kql_dash = fc.get_kql_dashboard(workspace_id, kql_dashboard_name='sampledashboard')
        kql_dash_orig_id = kql_dash.id

        kql_dash_name = "testdash" + datetime_str

        # Create a new dashboard and copy the sample definition onto it.
        kql_dash = fc.create_kql_dashboard(display_name=kql_dash_name, workspace_id=workspace_id)
        self.assertEqual(kql_dash.display_name, kql_dash_name)

        definition_orig = fc.get_kql_dashboard_definition(workspace_id, kql_dash_orig_id)
        definition_orig = definition_orig["definition"]
        self.assertIsNotNone(definition_orig)

        definition = fc.update_kql_dashboard_definition(workspace_id, kql_dash.id,
                                                        definition=definition_orig)
        self.assertIsNotNone(definition)

        # Both dashboards must show up in the listing.
        kql_dashs = fc.list_kql_dashboards(workspace_id)
        kql_dash_names = [kqld.display_name for kqld in kql_dashs]
        self.assertGreater(len(kql_dashs), 0)
        self.assertIn(kql_dash_name, kql_dash_names)
        self.assertIn('sampledashboard', kql_dash_names)

        kql_dash2 = fc.get_kql_dashboard(workspace_id, kql_dashboard_name=kql_dash_name)
        self.assertIsNotNone(kql_dash2.id)
        self.assertEqual(kql_dash2.display_name, kql_dash_name)

        # Rename and verify the returned item keeps the same id.
        new_name = kql_dash_name + "2"
        kql_dash3 = fc.update_kql_dashboard(workspace_id, kql_dash.id,
                                            display_name=new_name, return_item=True)
        self.assertEqual(kql_dash3.display_name, new_name)
        self.assertEqual(kql_dash.id, kql_dash3.id)

        # Delete and confirm it is gone from the listing.
        resp_code = fc.delete_kql_dashboard(workspace_id, kql_dash3.id)
        self.assertEqual(resp_code, 200)

        kql_dashs = fc.list_kql_dashboards(workspace_id)
        kql_dash_names = [kqld.display_name for kqld in kql_dashs]
        self.assertNotIn(kql_dash3.display_name, kql_dash_names)
@@ -0,0 +1,64 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end CRUD tests for KQL querysets."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        self.item_name = "testitem" + datetime_str
        self.item_type = "Notebook"

    def test_kql_querysets(self):

        fc = self.fc
        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'

        # A pre-existing queryset serves as the definition template.
        kql_queryset_name = "kqlqueryset12"
        kqlq_w_content = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_name)

        definition = fc.get_kql_queryset_definition(workspace_id, kqlq_w_content.id)
        self.assertIsNotNone(definition)
        self.assertIn("definition", definition)
        definition = definition["definition"]

        self.assertIsNotNone(kqlq_w_content.id)
        self.assertEqual(kqlq_w_content.display_name, kql_queryset_name)

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        kql_queryset_new = "kqlq" + datetime_str

        # Create a new queryset from the template definition.
        kqlq = fc.create_kql_queryset(workspace_id, definition=definition,
                                      display_name=kql_queryset_new)
        self.assertIsNotNone(kqlq.id)
        self.assertEqual(kqlq.display_name, kql_queryset_new)

        # Re-apply the definition and confirm it is present on the item.
        fc.update_kql_queryset_definition(workspace_id, kqlq.id, definition=definition)
        kqlq = fc.get_kql_queryset(workspace_id, kqlq.id)
        self.assertEqual(kqlq.display_name, kql_queryset_new)
        self.assertIsNotNone(kqlq.definition)

        kqlqs = fc.list_kql_querysets(workspace_id)
        kqlq_names = [kql.display_name for kql in kqlqs]
        self.assertGreater(len(kqlqs), 0)
        self.assertIn(kql_queryset_new, kqlq_names)

        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_new)
        self.assertIsNotNone(kqlq.id)
        self.assertEqual(kqlq.display_name, kql_queryset_new)

        # Rename; the returned item must keep the same id.
        kqlq2 = fc.update_kql_queryset(workspace_id, kql_queryset_id=kqlq.id,
                                       display_name=f"{kql_queryset_new}2", return_item=True)

        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
        self.assertEqual(kqlq.display_name, f"{kql_queryset_new}2")
        self.assertEqual(kqlq.id, kqlq2.id)

        status_code = fc.delete_kql_queryset(workspace_id, kqlq.id)
        self.assertEqual(status_code, 200)
@@ -0,0 +1,56 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end CRUD tests for KQL databases."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

    def test_kql_database(self):

        fc = self.fc
        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
        eventhouse_id = "f30ba76a-92c3-40d3-ad69-36db059c113d"

        # A ReadWrite database must be parented to an existing eventhouse.
        creation_payload = {"databaseType": "ReadWrite",
                            "parentEventhouseItemId": eventhouse_id}

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        kqldb_name = "kql" + datetime_str
        kqldb = fc.create_kql_database(workspace_id=workspace_id, display_name=kqldb_name,
                                       creation_payload=creation_payload)
        self.assertEqual(kqldb.display_name, kqldb_name)

        # Listing must contain the new database.
        kql_databases = fc.list_kql_databases(workspace_id)
        kql_database_names = [db.display_name for db in kql_databases]
        self.assertGreater(len(kql_databases), 0)
        self.assertIn(kqldb_name, kql_database_names)

        kqldb = fc.get_kql_database(workspace_id, kql_database_name=kqldb_name)
        self.assertIsNotNone(kqldb.id)
        self.assertEqual(kqldb.display_name, kqldb_name)

        # Rename and verify via a fresh get.
        new_name = kqldb_name + "2"
        kqldb2 = fc.update_kql_database(workspace_id, kqldb.id,
                                        display_name=new_name, return_item=True)

        kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
        self.assertEqual(kqldb.display_name, new_name)
        self.assertEqual(kqldb.id, kqldb2.id)

        # Round-trip the definition; the update may be async (202).
        response = fc.update_kql_database_definition(workspace_id, kqldb.id, kqldb.definition)
        self.assertIn(response.status_code, [200, 202])

        definition = fc.get_kql_database_definition(workspace_id, kql_database_id=kqldb.id)
        self.assertIn("definition", definition)
        self.assertIn("parts", definition["definition"])
        self.assertGreaterEqual(len(definition["definition"]["parts"]), 3)

        status_code = fc.delete_kql_database(workspace_id, kqldb.id)
        self.assertEqual(status_code, 200)
@@ -0,0 +1,93 @@
import unittest
from datetime import datetime
from dotenv import load_dotenv
from time import sleep
from msfabricpysdkcore.coreapi import FabricClientCore

load_dotenv()


class TestFabricClientCore(unittest.TestCase):
    """End-to-end tests for lakehouses: tables, maintenance jobs and CRUD."""

    def __init__(self, *args, **kwargs):
        super(TestFabricClientCore, self).__init__(*args, **kwargs)
        self.fc = FabricClientCore()

        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
        self.item_name = "testitem" + datetime_str
        self.item_type = "Notebook"

    def test_lakehouse(self):

        lakehouse2 = "lh2" + datetime.now().strftime("%Y%m%d%H%M%S")
        lakehouse3 = "lh3" + datetime.now().strftime("%Y%m%d%H%M%S")

        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'

        # List with properties and inspect the tables of a known lakehouse.
        lhs = self.fc.list_lakehouses(workspace_id=workspace_id, with_properties=True)
        lh = [lh_ for lh_ in lhs if lh_.display_name == "lakelhousewlabels"][0]
        tables = lh.list_tables()
        table_names = [t["name"] for t in tables]
        self.assertIn("titanic2", table_names)

        lakehouse = self.fc.get_item(workspace_id=workspace_id,
                                     item_name="lakelhousewlabels", item_type="Lakehouse")
        self.assertIsNotNone(lakehouse.properties)
        lakehouse_id = lakehouse.id
        date_str = datetime.now().strftime("%Y%m%d%H%M%S")
        table_name = f"table{date_str}"

        # Load a CSV file into a new table (async -> 202).
        status_code = self.fc.load_table(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
                                         table_name=table_name, path_type="File",
                                         relative_path="Files/to_share/titanic2.csv")
        self.assertEqual(status_code, 202)

        # Run on-demand table maintenance (optimize + vacuum) on a fixed table.
        table_name_maintenance = "table20240515114529"

        execution_data = {
            "tableName": table_name_maintenance,
            "optimizeSettings": {
                "vOrder": True,
                "zOrderBy": [
                    "tipAmount"
                ]
            },
            "vacuumSettings": {
                "retentionPeriod": "7:01:00:00"
            }
        }

        response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id,
                                                           lakehouse_id=lakehouse_id,
                                                           execution_data=execution_data,
                                                           job_type="TableMaintenance",
                                                           wait_for_completion=False)
        self.assertIn(response.status_code, [200, 202])

        # The freshly loaded table must be listed.
        table_list = self.fc.list_tables(workspace_id=workspace_id, lakehouse_id=lakehouse_id)
        table_names = [table["name"] for table in table_list]
        self.assertIn(table_name, table_names)

        fc = self.fc

        # Lakehouse CRUD: create, list, get, rename, get by name, delete.
        lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name=lakehouse2)
        self.assertIsNotNone(lakehouse.id)

        lakehouses = fc.list_lakehouses(workspace_id)
        lakehouse_names = [lh_.display_name for lh_ in lakehouses]
        self.assertGreater(len(lakehouse_names), 0)
        self.assertIn(lakehouse2, lakehouse_names)

        lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
        self.assertEqual(lakehouse.id, lakehouse2.id)

        # Give provisioning time to settle before renaming.
        sleep(20)
        lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id,
                                         display_name=lakehouse3, return_item=True)
        self.assertEqual(lakehouse2.display_name, lakehouse3)

        lh2_id = lakehouse2.id  # renamed from `id` to avoid shadowing the builtin

        lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_name=lakehouse3)
        self.assertEqual(lakehouse2.id, lh2_id)

        status_code = fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
        self.assertEqual(status_code, 200)