msfabricpysdkcore 0.2.1__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. msfabricpysdkcore/adminapi.py +142 -2
  2. msfabricpysdkcore/coreapi.py +476 -9
  3. msfabricpysdkcore/domain.py +4 -1
  4. msfabricpysdkcore/item.py +6 -4
  5. msfabricpysdkcore/otheritems.py +93 -0
  6. msfabricpysdkcore/tests/__init__.py +0 -0
  7. msfabricpysdkcore/tests/test_admin_apis.py +174 -0
  8. msfabricpysdkcore/tests/test_connection.py +111 -0
  9. msfabricpysdkcore/tests/test_datapipelines.py +45 -0
  10. msfabricpysdkcore/tests/test_deployment_pipeline.py +63 -0
  11. msfabricpysdkcore/tests/test_domains.py +126 -0
  12. msfabricpysdkcore/tests/test_environments.py +114 -0
  13. msfabricpysdkcore/tests/test_evenhouses.py +56 -0
  14. msfabricpysdkcore/tests/test_evenstreams.py +52 -0
  15. msfabricpysdkcore/tests/test_external_data_shares.py +51 -0
  16. msfabricpysdkcore/tests/test_fabric_azure_client.py +80 -0
  17. msfabricpysdkcore/tests/test_gateways.py +99 -0
  18. msfabricpysdkcore/tests/test_git.py +66 -0
  19. msfabricpysdkcore/tests/test_graphqlapi.py +44 -0
  20. msfabricpysdkcore/tests/test_items.py +97 -0
  21. msfabricpysdkcore/tests/test_jobs.py +96 -0
  22. msfabricpysdkcore/tests/test_kql_dashboards.py +63 -0
  23. msfabricpysdkcore/tests/test_kql_queryset.py +64 -0
  24. msfabricpysdkcore/tests/test_kqldatabases.py +56 -0
  25. msfabricpysdkcore/tests/test_lakehouse.py +93 -0
  26. msfabricpysdkcore/tests/test_managed_private_endpoints.py +61 -0
  27. msfabricpysdkcore/tests/test_mirroreddatabases.py +80 -0
  28. msfabricpysdkcore/tests/test_ml_experiments.py +47 -0
  29. msfabricpysdkcore/tests/test_ml_models.py +47 -0
  30. msfabricpysdkcore/tests/test_mounted_adf.py +64 -0
  31. msfabricpysdkcore/tests/test_notebooks.py +57 -0
  32. msfabricpysdkcore/tests/test_one_lake_data_access_security.py +63 -0
  33. msfabricpysdkcore/tests/test_other_items.py +45 -0
  34. msfabricpysdkcore/tests/test_reflex.py +57 -0
  35. msfabricpysdkcore/tests/test_reports.py +56 -0
  36. msfabricpysdkcore/tests/test_semantic_model.py +56 -0
  37. msfabricpysdkcore/tests/test_shortcuts.py +60 -0
  38. msfabricpysdkcore/tests/test_spark.py +91 -0
  39. msfabricpysdkcore/tests/test_sparkjobdefinition.py +55 -0
  40. msfabricpysdkcore/tests/test_sqldatabases.py +45 -0
  41. msfabricpysdkcore/tests/test_warehouses.py +50 -0
  42. msfabricpysdkcore/tests/test_workspaces_capacities.py +159 -0
  43. msfabricpysdkcore/workspace.py +149 -5
  44. {msfabricpysdkcore-0.2.1.dist-info → msfabricpysdkcore-0.2.3.dist-info}/METADATA +33 -6
  45. msfabricpysdkcore-0.2.3.dist-info/RECORD +65 -0
  46. {msfabricpysdkcore-0.2.1.dist-info → msfabricpysdkcore-0.2.3.dist-info}/WHEEL +1 -1
  47. msfabricpysdkcore-0.2.1.dist-info/RECORD +0 -28
  48. {msfabricpysdkcore-0.2.1.dist-info → msfabricpysdkcore-0.2.3.dist-info/licenses}/LICENSE +0 -0
  49. {msfabricpysdkcore-0.2.1.dist-info → msfabricpysdkcore-0.2.3.dist-info}/top_level.txt +0 -0
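The bulk of the release is the new msfabricpysdkcore/tests package shown in the hunks below. Each module is a plain unittest suite that instantiates FabricClientCore directly and loads credentials with python-dotenv, so the tests run against a live Fabric tenant using workspace and item IDs hard-coded by the author. As a minimal sketch (assuming a .env file with valid Fabric credentials and access to comparable workspaces; the module choice is illustrative), one of the new suites can be run programmatically:

    # Sketch: run one of the new test modules via the standard unittest loader.
    # Assumes a .env with Fabric credentials in the working directory; the
    # workspace/item IDs inside the tests belong to the package author's tenant.
    import unittest

    from msfabricpysdkcore.tests import test_jobs

    suite = unittest.defaultTestLoader.loadTestsFromModule(test_jobs)
    unittest.TextTestRunner(verbosity=2).run(suite)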
@@ -0,0 +1,96 @@
+ import unittest
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ from datetime import datetime
+ from dotenv import load_dotenv
+ 
+ load_dotenv()
+ 
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+         self.workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+         self.item_id = "38a1c15f-8a9e-49c5-8d05-a27cf9ce8b18"
+ 
+ 
+     def test_jobs_end_to_end(self):
+         job = self.fc.run_on_demand_item_job(workspace_id=self.workspace_id,
+                                              item_id=self.item_id,
+                                              job_type="RunNotebook")
+ 
+         self.assertEqual(job.item_id, self.item_id)
+         self.assertEqual(job.workspace_id, self.workspace_id)
+         self.assertEqual(job.job_type, "RunNotebook")
+         self.assertIn(job.status, ["NotStarted", "InProgress", "Failed"])
+         self.assertEqual(job.invoke_type, "Manual")
+ 
+         job2 = self.fc.get_item_job_instance(workspace_id=self.workspace_id,
+                                              item_id=self.item_id,
+                                              job_instance_id=job.id)
+ 
+         self.assertEqual(job.id, job2.id)
+ 
+         status_code = self.fc.cancel_item_job_instance(workspace_id=self.workspace_id,
+                                                        item_id=self.item_id,
+                                                        job_instance_id=job.id)
+ 
+         self.assertEqual(status_code, 202)
+ 
+         job_instances = self.fc.list_item_job_instances(workspace_id=self.workspace_id,
+                                                         item_id=self.item_id)
+ 
+         self.assertGreaterEqual(len(job_instances), 1)
+ 
+     def test_item_schedules(self):
+ 
+         fc = self.fc
+ 
+         item_id = "42b6e090-24ff-4dc7-8c52-cdae0ddd2c06"
+ 
+         workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+ 
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         spark_job_definition_name = f"sjd{datetime_str}"
+ 
+         spark_job_definition_w_content = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")
+         definition = fc.get_spark_job_definition_definition(workspace_id, spark_job_definition_w_content.id)
+ 
+         self.assertIsNotNone(definition)
+         self.assertIn("definition", definition)
+         definition = definition["definition"]
+ 
+         spark_job_definition = fc.create_spark_job_definition(workspace_id, display_name=spark_job_definition_name, definition=definition)
+ 
+         self.assertIsNotNone(spark_job_definition)
+ 
+         configuration = {'type': 'Daily',
+                          'startDateTime': '2024-11-21T00:00:00',
+                          'endDateTime': '2028-11-08T23:59:00',
+                          'localTimeZoneId': 'Romance Standard Time',
+                          'times': ['15:39']}
+ 
+         schedule = spark_job_definition.create_item_schedule(job_type="sparkjob", configuration=configuration, enabled=True)
+ 
+         schedule_id = schedule["id"]
+         schedule_check = spark_job_definition.get_item_schedule(schedule_id=schedule_id, job_type="sparkjob")
+         self.assertIsNotNone(schedule_check)
+         self.assertEqual(schedule_check["id"], schedule_id)
+ 
+         schedule_new = spark_job_definition.update_item_schedule(schedule_id=schedule_id, job_type="sparkjob", configuration=configuration, enabled=False)
+         self.assertIsNotNone(schedule_new)
+ 
+         schedule_check = spark_job_definition.get_item_schedule(schedule_id=schedule_id, job_type="sparkjob")
+         self.assertEqual(schedule_check["id"], schedule_id)
+         self.assertFalse(schedule_check["enabled"])
+         list_schedules = fc.list_item_schedules(workspace_id, item_id, job_type="sparkjob")
+ 
+         self.assertGreater(len(list_schedules), 0)
+ 
+         spark_job_definition.delete()
+ 
+ if __name__ == "__main__":
+     unittest.main()
+ 
@@ -0,0 +1,63 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+ 
+     def test_kql_dashboards(self):
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         fc = self.fc
+         workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+ 
+         kql_dash = fc.get_kql_dashboard(workspace_id, kql_dashboard_name='sampledashboard')
+         kql_dash_orig_id = kql_dash.id
+ 
+ 
+         kql_dash_name = "testdash" + datetime_str
+ 
+         kql_dash = fc.create_kql_dashboard(display_name=kql_dash_name, workspace_id=workspace_id)
+         self.assertEqual(kql_dash.display_name, kql_dash_name)
+ 
+         definition_orig = fc.get_kql_dashboard_definition(workspace_id, kql_dash_orig_id)
+         definition_orig = definition_orig["definition"]
+         self.assertIsNotNone(definition_orig)
+ 
+         definition = fc.update_kql_dashboard_definition(workspace_id, kql_dash.id, definition=definition_orig)
+ 
+         self.assertIsNotNone(definition)
+ 
+         kql_dashs = fc.list_kql_dashboards(workspace_id)
+ 
+         kql_dash_names = [kqld.display_name for kqld in kql_dashs]
+         self.assertGreater(len(kql_dashs), 0)
+         self.assertIn(kql_dash_name, kql_dash_names)
+         self.assertIn('sampledashboard', kql_dash_names)
+ 
+         kql_dash2 = fc.get_kql_dashboard(workspace_id, kql_dashboard_name=kql_dash_name)
+         self.assertIsNotNone(kql_dash2.id)
+         self.assertEqual(kql_dash2.display_name, kql_dash_name)
+ 
+         new_name = kql_dash_name+"2"
+         kql_dash3 = fc.update_kql_dashboard(workspace_id, kql_dash.id, display_name=new_name, return_item=True)
+ 
+         self.assertEqual(kql_dash3.display_name, new_name)
+         self.assertEqual(kql_dash.id, kql_dash3.id)
+ 
+         resp_code = fc.delete_kql_dashboard(workspace_id, kql_dash3.id)
+         self.assertEqual(resp_code, 200)
+ 
+         kql_dashs = fc.list_kql_dashboards(workspace_id)
+ 
+         kql_dash_names = [kqld.display_name for kqld in kql_dashs]
+         self.assertNotIn(kql_dash3.display_name, kql_dash_names)
+ 
@@ -0,0 +1,64 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         self.item_name = "testitem" + datetime_str
+         self.item_type = "Notebook"
+ 
+     def test_kql_querysets(self):
+ 
+         fc = self.fc
+         workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+ 
+         kql_queryset_name = "kqlqueryset12"
+         kqlq_w_content = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_name)
+ 
+         definition = fc.get_kql_queryset_definition(workspace_id, kqlq_w_content.id)
+         self.assertIsNotNone(definition)
+         self.assertIn("definition", definition)
+         definition = definition["definition"]
+ 
+         self.assertIsNotNone(kqlq_w_content.id)
+         self.assertEqual(kqlq_w_content.display_name, kql_queryset_name)
+ 
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         kql_queryset_new = "kqlq" + datetime_str
+ 
+         kqlq = fc.create_kql_queryset(workspace_id, definition=definition, display_name=kql_queryset_new)
+         self.assertIsNotNone(kqlq.id)
+         self.assertEqual(kqlq.display_name, kql_queryset_new)
+ 
+         fc.update_kql_queryset_definition(workspace_id, kqlq.id, definition=definition)
+         kqlq = fc.get_kql_queryset(workspace_id, kqlq.id)
+         self.assertEqual(kqlq.display_name, kql_queryset_new)
+         self.assertIsNotNone(kqlq.definition)
+ 
+         kqlqs = fc.list_kql_querysets(workspace_id)
+         kqlq_names = [kql.display_name for kql in kqlqs]
+         self.assertGreater(len(kqlqs), 0)
+         self.assertIn(kql_queryset_new, kqlq_names)
+ 
+         kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_new)
+         self.assertIsNotNone(kqlq.id)
+         self.assertEqual(kqlq.display_name, kql_queryset_new)
+ 
+         kqlq2 = fc.update_kql_queryset(workspace_id, kql_queryset_id=kqlq.id, display_name=f"{kql_queryset_new}2", return_item=True)
+ 
+         kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
+         self.assertEqual(kqlq.display_name, f"{kql_queryset_new}2")
+         self.assertEqual(kqlq.id, kqlq2.id)
+ 
+         status_code = fc.delete_kql_queryset(workspace_id, kqlq.id)
+         self.assertEqual(status_code, 200)
@@ -0,0 +1,56 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+     def test_kql_database(self):
+ 
+         fc = self.fc
+         workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+         evenhouse_id = "f30ba76a-92c3-40d3-ad69-36db059c113d"
+ 
+         creation_payload = {"databaseType" : "ReadWrite",
+                             "parentEventhouseItemId" : evenhouse_id}
+ 
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         kqldb_name = "kql" + datetime_str
+         kqldb = fc.create_kql_database(workspace_id = workspace_id, display_name=kqldb_name,
+                                        creation_payload=creation_payload)
+         self.assertEqual(kqldb.display_name, kqldb_name)
+ 
+         kql_databases = fc.list_kql_databases(workspace_id)
+         kql_database_names = [kqldb.display_name for kqldb in kql_databases]
+         self.assertGreater(len(kql_databases), 0)
+         self.assertIn(kqldb_name, kql_database_names)
+ 
+         kqldb = fc.get_kql_database(workspace_id, kql_database_name=kqldb_name)
+         self.assertIsNotNone(kqldb.id)
+         self.assertEqual(kqldb.display_name, kqldb_name)
+ 
+         new_name = kqldb_name+"2"
+         kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name=new_name, return_item=True)
+ 
+         kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
+         self.assertEqual(kqldb.display_name, new_name)
+         self.assertEqual(kqldb.id, kqldb2.id)
+ 
+         response = fc.update_kql_database_definition(workspace_id, kqldb.id, kqldb.definition)
+         self.assertIn(response.status_code, [200, 202])
+ 
+         definition = fc.get_kql_database_definition(workspace_id, kql_database_id=kqldb.id)
+         self.assertIn("definition", definition)
+         self.assertIn("parts", definition["definition"])
+         self.assertGreaterEqual(len(definition["definition"]["parts"]), 3)
+ 
+         status_code = fc.delete_kql_database(workspace_id, kqldb.id)
+         self.assertEqual(status_code, 200)
@@ -0,0 +1,93 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         self.item_name = "testitem" + datetime_str
+         self.item_type = "Notebook"
+ 
+     def test_lakehouse(self):
+ 
+         lakehouse2 = "lh2" + datetime.now().strftime("%Y%m%d%H%M%S")
+         lakehouse3 = "lh3" + datetime.now().strftime("%Y%m%d%H%M%S")
+ 
+         workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+         lhs = self.fc.list_lakehouses(workspace_id = workspace_id, with_properties=True)
+         lh = [lh_ for lh_ in lhs if lh_.display_name == "lakelhousewlabels"][0]
+         tables = lh.list_tables()
+         table_names = [t["name"] for t in tables]
+         self.assertIn("titanic2", table_names)
+ 
+         lakehouse = self.fc.get_item(workspace_id=workspace_id, item_name="lakelhousewlabels", item_type="Lakehouse")
+         self.assertIsNotNone(lakehouse.properties)
+         lakehouse_id = lakehouse.id
+         date_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         table_name = f"table{date_str}"
+ 
+ 
+         status_code = self.fc.load_table(workspace_id=workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
+                                          path_type="File", relative_path="Files/to_share/titanic2.csv")
+ 
+         self.assertEqual(status_code, 202)
+ 
+         # Run on demand table maintenance
+         table_name_maintenance = "table20240515114529"
+ 
+         execution_data = {
+             "tableName": table_name_maintenance,
+             "optimizeSettings": {
+                 "vOrder": True,
+                 "zOrderBy": [
+                     "tipAmount"
+                 ]
+             },
+             "vacuumSettings": {
+                 "retentionPeriod": "7:01:00:00"
+             }
+         }
+ 
+         response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
+                                                            execution_data = execution_data,
+                                                            job_type = "TableMaintenance", wait_for_completion = False)
+         self.assertIn(response.status_code, [200, 202])
+ 
+         table_list = self.fc.list_tables(workspace_id=workspace_id, lakehouse_id=lakehouse_id)
+         table_names = [table["name"] for table in table_list]
+ 
+         self.assertIn(table_name, table_names)
+ 
+         fc = self.fc
+ 
+         lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name=lakehouse2)
+         self.assertIsNotNone(lakehouse.id)
+ 
+         lakehouses = fc.list_lakehouses(workspace_id)
+         lakehouse_names = [lh.display_name for lh in lakehouses]
+         self.assertGreater(len(lakehouse_names), 0)
+         self.assertIn(lakehouse2, lakehouse_names)
+ 
+         lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
+         self.assertEqual(lakehouse.id, lakehouse2.id)
+ 
+         sleep(20)
+         lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name=lakehouse3, return_item=True)
+         self.assertEqual(lakehouse2.display_name, lakehouse3)
+ 
+         id = lakehouse2.id
+ 
+         lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_name=lakehouse3)
+         self.assertEqual(lakehouse2.id, id)
+ 
+         status_code = fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
+         self.assertEqual(status_code, 200)
@@ -0,0 +1,61 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+     def test_workspace_managed_private_endpoints(self):
+ 
+         fc = self.fc
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+ 
+         mpes = fc.list_workspace_managed_private_endpoints(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb')
+ 
+         if len(mpes) > 0:
+ 
+             for mpe in mpes:
+                 status_code = fc.delete_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
+                                                                            managed_private_endpoint_id=mpe["id"])
+                 self.assertEqual(status_code, 200)
+             sleep(60)
+ 
+         mpe = fc.create_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
+                                                            name = f'testmpe{datetime_str}',
+                                                            target_private_link_resource_id = '/subscriptions/c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8/resourceGroups/fabricdemo/providers/Microsoft.Storage/storageAccounts/publicfabricdemo9039',
+                                                            target_subresource_type = 'dfs',
+                                                            request_message = 'testmessage')
+ 
+         mpes = fc.list_workspace_managed_private_endpoints(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb')
+ 
+         self.assertIsNotNone(mpes)
+         self.assertGreater(len(mpes), 0)
+ 
+         mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
+                                                          managed_private_endpoint_id=mpe["id"])
+ 
+         self.assertEqual(mpe2["id"], mpe["id"])
+ 
+         self.assertIsNotNone(mpe2["connectionState"])
+         self.assertIn("targetPrivateLinkResourceId", mpe2)
+         self.assertEqual(mpe2["targetPrivateLinkResourceId"], "/subscriptions/c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8/resourceGroups/fabricdemo/providers/Microsoft.Storage/storageAccounts/publicfabricdemo9039")
+ 
+         for _ in range(0, 20):
+             if mpe2["connectionState"]["status"] != "Pending":
+                 sleep(30)
+             else:
+                 status_code = fc.delete_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
+                                                                            managed_private_endpoint_id=mpe["id"])
+                 self.assertEqual(status_code, 200)
+                 break
+             mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
+                                                              managed_private_endpoint_id=mpe["id"])
+ 
@@ -0,0 +1,80 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+     def test_mirrored_database(self):
+ 
+         fc = self.fc
+         workspace_id = '46425c13-5736-4285-972c-6d034020f3ff'
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+ 
+         mirrored_db_name = "mirrored_db" + datetime_str
+ 
+         # mirrored_db_w_content = fc.get_mirrored_database(workspace_id, mirrored_database_name="dfsdemo")
+ 
+         # status = fc.get_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
+         # self.assertIsNotNone(status)
+         # self.assertIn("status", status)
+ 
+         # status = status["status"]
+ 
+         # if status == 'Running':
+         #     fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
+         #     sleep(60)
+         #     fc.start_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
+         # else:
+         #     fc.start_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
+         #     sleep(60)
+         #     fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
+ 
+         # table_status = fc.get_tables_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
+ 
+         # self.assertIsNotNone(table_status)
+         # self.assertIn("data", table_status)
+         # for _ in range(5):
+         #     if len(table_status["data"]) > 0:
+         #         break
+         #     sleep(60)
+         #     table_status = fc.get_tables_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
+         # self.assertIn("sourceTableName", table_status["data"][0])
+ 
+         # fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
+ 
+         # definition = fc.get_mirrored_database_definition(workspace_id, mirrored_db_w_content.id)
+         # self.assertIsNotNone(definition)
+         # self.assertIn("definition", definition)
+         # self.assertIn("parts", definition["definition"])
+ 
+         # mirrored_db = fc.create_mirrored_database(workspace_id, display_name=mirrored_db_name)
+ 
+         # mirrored_db_check = fc.get_mirrored_database(workspace_id, mirrored_database_id=mirrored_db.id)
+         # self.assertEqual(mirrored_db_check.display_name, mirrored_db_name)
+         # self.assertIsNotNone(mirrored_db_check.id)
+         # self.assertEqual(mirrored_db_check.id, mirrored_db_check.id)
+ 
+         # mirrored_dbs = fc.list_mirrored_databases(workspace_id)
+         # mirrored_db_names = [md.display_name for md in mirrored_dbs]
+         # self.assertGreater(len(mirrored_dbs), 0)
+         # self.assertIn(mirrored_db_name, mirrored_db_names)
+ 
+         # sleep(60)
+ 
+         # mirrored_db_2 = fc.update_mirrored_database(workspace_id, mirrored_db_check.id,
+         #                                             display_name=f"u{mirrored_db_name}", return_item=True)
+         # mirrored_db_2 = fc.get_mirrored_database(workspace_id, mirrored_database_id=mirrored_db_2.id)
+ 
+         # self.assertEqual(mirrored_db_2.display_name, f"u{mirrored_db_name}")
+ 
+         # status_code = fc.delete_mirrored_database(workspace_id, mirrored_db_2.id)
+         # self.assertEqual(status_code, 200)
@@ -0,0 +1,47 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         self.item_name = "testitem" + datetime_str
+         self.item_type = "Notebook"
+ 
+ 
+     def test_ml_experiments(self):
+ 
+         fc = self.fc
+         workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+         mlexperiment_name = "mlexp" + datetime.now().strftime("%Y%m%d%H%M%S")
+         mlexperiment_name2 = "mlexp2" + datetime.now().strftime("%Y%m%d%H%M%S")
+ 
+         ml_experiment = fc.create_ml_experiment(workspace_id, display_name=mlexperiment_name)
+         self.assertEqual(ml_experiment.display_name, mlexperiment_name)
+ 
+         ml_experiments = fc.list_ml_experiments(workspace_id)
+         ml_experiment_names = [mle.display_name for mle in ml_experiments]
+         self.assertGreater(len(ml_experiments), 0)
+         self.assertIn(mlexperiment_name, ml_experiment_names)
+ 
+         mle = fc.get_ml_experiment(workspace_id, ml_experiment_name=mlexperiment_name)
+         self.assertIsNotNone(mle.id)
+         self.assertEqual(mle.display_name, mlexperiment_name)
+ 
+         mle2 = fc.update_ml_experiment(workspace_id, mle.id, display_name=mlexperiment_name2, return_item=True)
+ 
+         mle = fc.get_ml_experiment(workspace_id, ml_experiment_id=mle.id)
+         self.assertEqual(mle.display_name, mlexperiment_name2)
+         self.assertEqual(mle.id, mle2.id)
+ 
+         status_code = fc.delete_ml_experiment(workspace_id, mle.id)
+         self.assertEqual(status_code, 200)
@@ -0,0 +1,47 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         self.item_name = "testitem" + datetime_str
+         self.item_type = "Notebook"
+ 
+ 
+     def test_ml_models(self):
+ 
+         fc = self.fc
+         workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+         model_name = "mlm" + datetime_str
+ 
+         ml_model = fc.create_ml_model(workspace_id, display_name=model_name)
+         self.assertEqual(ml_model.display_name, model_name)
+ 
+         ml_models = fc.list_ml_models(workspace_id)
+         ml_model_names = [ml.display_name for ml in ml_models]
+         self.assertGreater(len(ml_models), 0)
+         self.assertIn(model_name, ml_model_names)
+ 
+         mlm = fc.get_ml_model(workspace_id, ml_model_name=model_name)
+         self.assertIsNotNone(mlm.id)
+         self.assertEqual(mlm.display_name, model_name)
+ 
+         mlm2 = fc.update_ml_model(workspace_id=workspace_id, ml_model_id=mlm.id, description=model_name, return_item=True)
+ 
+         mlm = fc.get_ml_model(workspace_id, ml_model_id=mlm.id)
+         self.assertEqual(mlm.description, model_name)
+         self.assertEqual(mlm.id, mlm2.id)
+ 
+         status_code = fc.delete_ml_model(workspace_id, mlm.id)
+         self.assertEqual(status_code, 200)
@@ -0,0 +1,64 @@
+ import unittest
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from time import sleep
+ from msfabricpysdkcore.coreapi import FabricClientCore
+ 
+ load_dotenv()
+ 
+ # class TestFabricClientCore(unittest.TestCase):
+ 
+ class TestFabricClientCore(unittest.TestCase):
+ 
+     def __init__(self, *args, **kwargs):
+         super(TestFabricClientCore, self).__init__(*args, **kwargs)
+         #load_dotenv()
+         self.fc = FabricClientCore()
+ 
+ 
+     def test_mounted_adf(self):
+ 
+         fc = self.fc
+         workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+ 
+         definition = {'parts': [{'path': 'mountedDataFactory-content.json',
+                                  'payload': 'ewogICJkYXRhRmFjdG9yeVJlc291cmNlSWQiOiAiL3N1YnNjcmlwdGlvbnMvYzc3Y2M4ZmMtNDNiYi00ZDQ0LWJkYzUtNmUyMDUxMWVkMmE4L3Jlc291cmNlR3JvdXBzL2ZhYnJpY2RlbW8vcHJvdmlkZXJzL01pY3Jvc29mdC5EYXRhRmFjdG9yeS9mYWN0b3JpZXMvZmFicmljYWRmMjAyNTAzMDYiCn0=',
+                                  'payloadType': 'InlineBase64'},
+                                 {'path': '.platform',
+                                  'payload': 'ewogICIkc2NoZW1hIjogImh0dHBzOi8vZGV2ZWxvcGVyLm1pY3Jvc29mdC5jb20vanNvbi1zY2hlbWFzL2ZhYnJpYy9naXRJbnRlZ3JhdGlvbi9wbGF0Zm9ybVByb3BlcnRpZXMvMi4wLjAvc2NoZW1hLmpzb24iLAogICJtZXRhZGF0YSI6IHsKICAgICJ0eXBlIjogIk1vdW50ZWREYXRhRmFjdG9yeSIsCiAgICAiZGlzcGxheU5hbWUiOiAiZmFicmljYWRmMjAyNTAzMDYiCiAgfSwKICAiY29uZmlnIjogewogICAgInZlcnNpb24iOiAiMi4wIiwKICAgICJsb2dpY2FsSWQiOiAiMDAwMDAwMDAtMDAwMC0wMDAwLTAwMDAtMDAwMDAwMDAwMDAwIgogIH0KfQ==',
+                                  'payloadType': 'InlineBase64'}]}
+ 
+         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+ 
+         mounted_adf_name = "mounted_adf" + datetime_str
+         mounted_adf = fc.create_mounted_data_factory(workspace_id,
+                                                      display_name=mounted_adf_name,
+                                                      definition=definition)
+ 
+         self.assertEqual(mounted_adf.display_name, mounted_adf_name)
+ 
+         mounted_adfs = fc.list_mounted_data_factories(workspace_id)
+         mounted_adf_names = [adf.display_name for adf in mounted_adfs]
+         self.assertGreater(len(mounted_adfs), 0)
+         self.assertIn(mounted_adf_name, mounted_adf_names)
+ 
+         adf = fc.get_mounted_data_factory(workspace_id, mounted_data_factory_name=mounted_adf_name)
+         self.assertIsNotNone(adf.id)
+         self.assertEqual(adf.display_name, mounted_adf_name)
+ 
+         adf2 = fc.update_mounted_data_factory(workspace_id, adf.id, display_name=f"{mounted_adf_name}2", return_item=True)
+ 
+         adf = fc.get_mounted_data_factory(workspace_id, mounted_data_factory_id=adf.id)
+         self.assertEqual(adf.display_name, f"{mounted_adf_name}2")
+         self.assertEqual(adf.id, adf2.id)
+ 
+         response = fc.update_mounted_data_factory_definition(workspace_id, mounted_data_factory_id=adf.id, definition=adf.definition)
+         self.assertIn(response.status_code, [200, 202])
+ 
+         definition = fc.get_mounted_data_factory_definition(workspace_id, mounted_data_factory_id=adf.id)
+         self.assertIn("definition", definition)
+         self.assertIn("parts", definition["definition"])
+         self.assertGreaterEqual(len(definition["definition"]["parts"]), 2)
+ 
+         status_code = fc.delete_mounted_data_factory(workspace_id, adf.id)
+         self.assertEqual(status_code, 200)