msfabricpysdkcore 0.2.5__py3-none-any.whl → 0.2.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. msfabricpysdkcore/coreapi.py +742 -32
  2. msfabricpysdkcore/otheritems.py +61 -1
  3. msfabricpysdkcore/workspace.py +161 -9
  4. {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/METADATA +4 -2
  5. msfabricpysdkcore-0.2.7.dist-info/RECORD +30 -0
  6. msfabricpysdkcore/tests/__init__.py +0 -0
  7. msfabricpysdkcore/tests/test_admin_apis.py +0 -174
  8. msfabricpysdkcore/tests/test_admin_tags.py +0 -46
  9. msfabricpysdkcore/tests/test_connection.py +0 -111
  10. msfabricpysdkcore/tests/test_copy_jobs.py +0 -60
  11. msfabricpysdkcore/tests/test_dataflows.py +0 -60
  12. msfabricpysdkcore/tests/test_datapipelines.py +0 -60
  13. msfabricpysdkcore/tests/test_deployment_pipeline.py +0 -63
  14. msfabricpysdkcore/tests/test_deployment_pipelinev2.py +0 -135
  15. msfabricpysdkcore/tests/test_domains.py +0 -126
  16. msfabricpysdkcore/tests/test_environments.py +0 -114
  17. msfabricpysdkcore/tests/test_evenhouses.py +0 -56
  18. msfabricpysdkcore/tests/test_evenstreams.py +0 -52
  19. msfabricpysdkcore/tests/test_eventstream_topology.py +0 -82
  20. msfabricpysdkcore/tests/test_external_data_shares.py +0 -51
  21. msfabricpysdkcore/tests/test_fabric_azure_client.py +0 -80
  22. msfabricpysdkcore/tests/test_folders.py +0 -53
  23. msfabricpysdkcore/tests/test_gateways.py +0 -99
  24. msfabricpysdkcore/tests/test_git.py +0 -66
  25. msfabricpysdkcore/tests/test_graphqlapi.py +0 -44
  26. msfabricpysdkcore/tests/test_items.py +0 -97
  27. msfabricpysdkcore/tests/test_jobs.py +0 -96
  28. msfabricpysdkcore/tests/test_kql_dashboards.py +0 -63
  29. msfabricpysdkcore/tests/test_kql_queryset.py +0 -64
  30. msfabricpysdkcore/tests/test_kqldatabases.py +0 -56
  31. msfabricpysdkcore/tests/test_lakehouse.py +0 -93
  32. msfabricpysdkcore/tests/test_managed_private_endpoints.py +0 -61
  33. msfabricpysdkcore/tests/test_mirroreddatabases.py +0 -80
  34. msfabricpysdkcore/tests/test_ml_experiments.py +0 -47
  35. msfabricpysdkcore/tests/test_ml_models.py +0 -47
  36. msfabricpysdkcore/tests/test_mounted_adf.py +0 -64
  37. msfabricpysdkcore/tests/test_notebooks.py +0 -57
  38. msfabricpysdkcore/tests/test_one_lake_data_access_security.py +0 -63
  39. msfabricpysdkcore/tests/test_other_items.py +0 -45
  40. msfabricpysdkcore/tests/test_reflex.py +0 -57
  41. msfabricpysdkcore/tests/test_reports.py +0 -56
  42. msfabricpysdkcore/tests/test_semantic_model.py +0 -56
  43. msfabricpysdkcore/tests/test_shortcuts.py +0 -60
  44. msfabricpysdkcore/tests/test_spark.py +0 -91
  45. msfabricpysdkcore/tests/test_sparkjobdefinition.py +0 -55
  46. msfabricpysdkcore/tests/test_sqldatabases.py +0 -45
  47. msfabricpysdkcore/tests/test_tags.py +0 -28
  48. msfabricpysdkcore/tests/test_variable_libary.py +0 -61
  49. msfabricpysdkcore/tests/test_warehouses.py +0 -50
  50. msfabricpysdkcore/tests/test_workspaces_capacities.py +0 -159
  51. msfabricpysdkcore-0.2.5.dist-info/RECORD +0 -75
  52. {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/WHEEL +0 -0
  53. {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/licenses/LICENSE +0 -0
  54. {msfabricpysdkcore-0.2.5.dist-info → msfabricpysdkcore-0.2.7.dist-info}/top_level.txt +0 -0
@@ -1,44 +0,0 @@
1
- import unittest
2
- from datetime import datetime
3
- from dotenv import load_dotenv
4
- from time import sleep
5
- from msfabricpysdkcore.coreapi import FabricClientCore
6
-
7
- load_dotenv()
8
-
9
- # class TestFabricClientCore(unittest.TestCase):
10
-
11
- class TestFabricClientCore(unittest.TestCase):
12
-
13
- def __init__(self, *args, **kwargs):
14
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
15
- #load_dotenv()
16
- self.fc = FabricClientCore()
17
-
18
-
19
- def test_graphql_api(self):
20
-
21
- fc = self.fc
22
- workspace_id = '46425c13-5736-4285-972c-6d034020f3ff'
23
-
24
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
25
-
26
- graph_ql = fc.create_graphql_api(workspace_id, display_name="graphql" + datetime_str)
27
- self.assertEqual(graph_ql.display_name, "graphql" + datetime_str)
28
-
29
- graph_qls = fc.list_graphql_apis(workspace_id)
30
- graph_ql_names = [gql.display_name for gql in graph_qls]
31
- self.assertGreater(len(graph_qls), 0)
32
- self.assertIn("graphql" + datetime_str, graph_ql_names)
33
-
34
- gql = fc.get_graphql_api(workspace_id, graphql_api_name="graphql" + datetime_str)
35
- self.assertIsNotNone(gql.id)
36
- self.assertEqual(gql.display_name, "graphql" + datetime_str)
37
-
38
- gql2 = fc.update_graphql_api(workspace_id, gql.id, display_name=f"graphql{datetime_str}2", return_item=True)
39
-
40
- gql = fc.get_graphql_api(workspace_id, graphql_api_id=gql.id)
41
- self.assertEqual(gql.display_name, f"graphql{datetime_str}2")
42
-
43
- status_code = fc.delete_graphql_api(workspace_id, gql.id)
44
- self.assertEqual(status_code, 200)
@@ -1,97 +0,0 @@
1
- import unittest
2
- from datetime import datetime
3
- from dotenv import load_dotenv
4
- from time import sleep
5
- from msfabricpysdkcore.coreapi import FabricClientCore
6
-
7
- load_dotenv()
8
-
9
- class TestFabricClientCore(unittest.TestCase):
10
-
11
- def __init__(self, *args, **kwargs):
12
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
- #load_dotenv()
14
- self.fc = FabricClientCore()
15
- self.workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
16
-
17
-
18
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
19
- self.item_name = "testitem" + datetime_str
20
- self.item_type = "Notebook"
21
-
22
- def test_item_end_to_end(self):
23
-
24
- item = self.fc.create_item(display_name=self.item_name, type=self.item_type, workspace_id=self.workspace_id)
25
- self.assertEqual(item.display_name, self.item_name)
26
- self.assertEqual(item.type, self.item_type)
27
- self.assertEqual(item.workspace_id, self.workspace_id)
28
- self.assertEqual(item.description, "")
29
-
30
- item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
31
- item_ = self.fc.get_item(workspace_id=self.workspace_id,
32
- item_name=self.item_name, item_type=self.item_type)
33
- self.assertEqual(item.id, item_.id)
34
- self.assertEqual(item.display_name, self.item_name)
35
- self.assertEqual(item.type, self.item_type)
36
- self.assertEqual(item.workspace_id, self.workspace_id)
37
- self.assertEqual(item.description, "")
38
-
39
- item_list = self.fc.list_items(workspace_id=self.workspace_id)
40
- self.assertTrue(len(item_list) > 0)
41
-
42
- item_ids = [item_.id for item_ in item_list]
43
- self.assertIn(item.id, item_ids)
44
-
45
- self.fc.update_item(workspace_id=self.workspace_id, item_id=item.id, display_name=f"u{self.item_name}", return_item=True)
46
- item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
47
- self.assertEqual(item.display_name, f"u{self.item_name}")
48
-
49
- status_code = self.fc.delete_item(workspace_id=self.workspace_id, item_id=item.id)
50
-
51
- self.assertAlmostEqual(status_code, 200)
52
-
53
- def test_item_definition(self):
54
-
55
- sjd = self.fc.get_item(workspace_id=self.workspace_id, item_name="helloworld", item_type="SparkJobDefinition")
56
- self.assertIsNotNone(sjd.definition)
57
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
58
- blubb2 = "blubb2" + datetime_str
59
- blubb3 = "blubb3" + datetime_str
60
- blubb2 = self.fc.create_item(display_name=blubb2, type="SparkJobDefinition", workspace_id=self.workspace_id,
61
- definition=sjd.definition)
62
-
63
- blubb3 = self.fc.create_item(display_name=blubb3, type="SparkJobDefinition", workspace_id=self.workspace_id)
64
-
65
- response = self.fc.update_item_definition(workspace_id=self.workspace_id,
66
- item_id=blubb3.id, definition=sjd.definition)
67
-
68
- self.assertEqual(response.status_code, 200)
69
- blubb3 = self.fc.get_item(workspace_id=self.workspace_id, item_id=blubb3.id)
70
-
71
- self.assertIn("parts", blubb3.definition)
72
-
73
- self.assertEqual(len(blubb3.definition["parts"]), len(sjd.definition["parts"]))
74
- sjd_defintion = [part["path"] for part in sjd.definition["parts"] if part["path"] == "SparkJobDefinitionV1.json"]
75
- blubb3_defintion = [part["path"] for part in blubb3.definition["parts"] if part["path"] == "SparkJobDefinitionV1.json"]
76
- self.assertEqual(sjd_defintion, blubb3_defintion)
77
-
78
- self.assertNotEqual(blubb2.id, sjd.id)
79
- self.assertIn("parts", blubb2.definition)
80
-
81
- self.assertEqual(len(blubb2.definition["parts"]), len(sjd.definition["parts"]))
82
- sjd_defintion = [part["path"] for part in sjd.definition["parts"] if part["path"] == "SparkJobDefinitionV1.json"]
83
- blubb2_defintion = [part["path"] for part in blubb2.definition["parts"] if part["path"] == "SparkJobDefinitionV1.json"]
84
- self.assertEqual(sjd_defintion, blubb2_defintion)
85
- self.assertNotEqual(blubb2.id, blubb3.id)
86
-
87
- blubb2.delete()
88
- blubb3.delete()
89
-
90
- def test_item_connections(self):
91
-
92
- fc = self.fc
93
- connections = fc.list_item_connections(workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3', item_id = '1bcc8b76-8e1f-428d-a594-f91ce1b9b076')
94
- self.assertEqual(len(connections), 0)
95
-
96
- if __name__ == "__main__":
97
- unittest.main()
@@ -1,96 +0,0 @@
1
- import unittest
2
- from msfabricpysdkcore.coreapi import FabricClientCore
3
- from datetime import datetime
4
- from dotenv import load_dotenv
5
-
6
- load_dotenv()
7
-
8
-
9
- class TestFabricClientCore(unittest.TestCase):
10
-
11
- def __init__(self, *args, **kwargs):
12
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
- #load_dotenv()
14
- self.fc = FabricClientCore()
15
- self.workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
16
- self.item_id = "38a1c15f-8a9e-49c5-8d05-a27cf9ce8b18"
17
-
18
-
19
- def test_jobs_end_to_end(self):
20
- job = self.fc.run_on_demand_item_job(workspace_id=self.workspace_id,
21
- item_id=self.item_id,
22
- job_type="RunNotebook")
23
-
24
- self.assertEqual(job.item_id, self.item_id)
25
- self.assertEqual(job.workspace_id, self.workspace_id)
26
- self.assertEqual(job.job_type, "RunNotebook")
27
- self.assertIn(job.status, ["NotStarted", "InProgress", "Failed"])
28
- self.assertEqual(job.invoke_type, "Manual")
29
-
30
- job2 = self.fc.get_item_job_instance(workspace_id=self.workspace_id,
31
- item_id=self.item_id,
32
- job_instance_id=job.id)
33
-
34
- self.assertEqual(job.id, job2.id)
35
-
36
- status_code = self.fc.cancel_item_job_instance(workspace_id=self.workspace_id,
37
- item_id=self.item_id,
38
- job_instance_id=job.id)
39
-
40
- self.assertEqual(status_code, 202)
41
-
42
- job_instances = self.fc.list_item_job_instances(workspace_id=self.workspace_id,
43
- item_id=self.item_id)
44
-
45
- self.assertGreaterEqual(len(job_instances), 1)
46
-
47
- def test_item_schedules(self):
48
-
49
- fc = self.fc
50
-
51
- item_id = "42b6e090-24ff-4dc7-8c52-cdae0ddd2c06"
52
-
53
- workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
54
-
55
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
56
- spark_job_definition_name = f"sjd{datetime_str}"
57
-
58
- spark_job_definition_w_content = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")
59
- definition = fc.get_spark_job_definition_definition(workspace_id, spark_job_definition_w_content.id)
60
-
61
- self.assertIsNotNone(definition)
62
- self.assertIn("definition", definition)
63
- definition = definition["definition"]
64
-
65
- spark_job_definition = fc.create_spark_job_definition(workspace_id, display_name=spark_job_definition_name, definition=definition)
66
-
67
- self.assertIsNotNone(spark_job_definition)
68
-
69
- configuration = {'type': 'Daily',
70
- 'startDateTime': '2024-11-21T00:00:00',
71
- 'endDateTime': '2028-11-08T23:59:00',
72
- 'localTimeZoneId': 'Romance Standard Time',
73
- 'times': ['15:39']}
74
-
75
- schedule = spark_job_definition.create_item_schedule(job_type="sparkjob", configuration=configuration, enabled=True)
76
-
77
- schedule_id = schedule["id"]
78
- schedule_check = spark_job_definition.get_item_schedule(schedule_id=schedule_id, job_type="sparkjob")
79
- self.assertIsNotNone(schedule_check)
80
- self.assertEqual(schedule_check["id"], schedule_id)
81
-
82
- schedule_new = spark_job_definition.update_item_schedule(schedule_id=schedule_id, job_type="sparkjob", configuration=configuration, enabled=False)
83
- self.assertIsNotNone(schedule_new)
84
-
85
- schedule_check = spark_job_definition.get_item_schedule(schedule_id=schedule_id, job_type="sparkjob")
86
- self.assertEqual(schedule_check["id"], schedule_id)
87
- self.assertFalse(schedule_check["enabled"])
88
- list_schedules = fc.list_item_schedules(workspace_id, item_id, job_type="sparkjob")
89
-
90
- self.assertGreater(len(list_schedules), 0)
91
-
92
- spark_job_definition.delete()
93
-
94
- if __name__ == "__main__":
95
- unittest.main()
96
-
@@ -1,63 +0,0 @@
1
- import unittest
2
- from datetime import datetime
3
- from dotenv import load_dotenv
4
- from time import sleep
5
- from msfabricpysdkcore.coreapi import FabricClientCore
6
-
7
- load_dotenv()
8
-
9
- class TestFabricClientCore(unittest.TestCase):
10
-
11
- def __init__(self, *args, **kwargs):
12
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
- #load_dotenv()
14
- self.fc = FabricClientCore()
15
-
16
-
17
- def test_kql_dashboards(self):
18
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
19
- fc = self.fc
20
- workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
21
-
22
- kql_dash = fc.get_kql_dashboard(workspace_id, kql_dashboard_name='sampledashboard')
23
- kql_dash_orig_id = kql_dash.id
24
-
25
-
26
- kql_dash_name = "testdash" + datetime_str
27
-
28
- kql_dash = fc.create_kql_dashboard(display_name=kql_dash_name, workspace_id=workspace_id)
29
- self.assertEqual(kql_dash.display_name, kql_dash_name)
30
-
31
- definition_orig = fc.get_kql_dashboard_definition(workspace_id, kql_dash_orig_id)
32
- definition_orig = definition_orig["definition"]
33
- self.assertIsNotNone(definition_orig)
34
-
35
- definition = fc.update_kql_dashboard_definition(workspace_id, kql_dash.id, definition=definition_orig)
36
-
37
- self.assertIsNotNone(definition)
38
-
39
- kql_dashs = fc.list_kql_dashboards(workspace_id)
40
-
41
- kql_dash_names = [kqld.display_name for kqld in kql_dashs]
42
- self.assertGreater(len(kql_dashs), 0)
43
- self.assertIn(kql_dash_name, kql_dash_names)
44
- self.assertIn('sampledashboard', kql_dash_names)
45
-
46
- kql_dash2 = fc.get_kql_dashboard(workspace_id, kql_dashboard_name=kql_dash_name)
47
- self.assertIsNotNone(kql_dash2.id)
48
- self.assertEqual(kql_dash2.display_name, kql_dash_name)
49
-
50
- new_name = kql_dash_name+"2"
51
- kql_dash3 = fc.update_kql_dashboard(workspace_id, kql_dash.id, display_name=new_name, return_item=True)
52
-
53
- self.assertEqual(kql_dash3.display_name, new_name)
54
- self.assertEqual(kql_dash.id, kql_dash3.id)
55
-
56
- resp_code = fc.delete_kql_dashboard(workspace_id, kql_dash3.id)
57
- self.assertEqual(resp_code, 200)
58
-
59
- kql_dashs = fc.list_kql_dashboards(workspace_id)
60
-
61
- kql_dash_names = [kqld.display_name for kqld in kql_dashs]
62
- self.assertNotIn(kql_dash3.display_name, kql_dash_names)
63
-
@@ -1,64 +0,0 @@
1
- import unittest
2
- from datetime import datetime
3
- from dotenv import load_dotenv
4
- from time import sleep
5
- from msfabricpysdkcore.coreapi import FabricClientCore
6
-
7
- load_dotenv()
8
-
9
- class TestFabricClientCore(unittest.TestCase):
10
-
11
- def __init__(self, *args, **kwargs):
12
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
- #load_dotenv()
14
- self.fc = FabricClientCore()
15
-
16
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
17
- self.item_name = "testitem" + datetime_str
18
- self.item_type = "Notebook"
19
-
20
- def test_kql_querysets(self):
21
-
22
- fc = self.fc
23
- workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
24
-
25
- kql_queryset_name = "kqlqueryset12"
26
- kqlq_w_content = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_name)
27
-
28
- definition = fc.get_kql_queryset_definition(workspace_id, kqlq_w_content.id)
29
- self.assertIsNotNone(definition)
30
- self.assertIn("definition", definition)
31
- definition = definition["definition"]
32
-
33
- self.assertIsNotNone(kqlq_w_content.id)
34
- self.assertEqual(kqlq_w_content.display_name, kql_queryset_name)
35
-
36
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
37
- kql_queryset_new = "kqlq" + datetime_str
38
-
39
- kqlq = fc.create_kql_queryset(workspace_id, definition=definition, display_name=kql_queryset_new)
40
- self.assertIsNotNone(kqlq.id)
41
- self.assertEqual(kqlq.display_name, kql_queryset_new)
42
-
43
- fc.update_kql_queryset_definition(workspace_id, kqlq.id, definition=definition)
44
- kqlq = fc.get_kql_queryset(workspace_id, kqlq.id)
45
- self.assertEqual(kqlq.display_name, kql_queryset_new)
46
- self.assertIsNotNone(kqlq.definition)
47
-
48
- kqlqs = fc.list_kql_querysets(workspace_id)
49
- kqlq_names = [kql.display_name for kql in kqlqs]
50
- self.assertGreater(len(kqlqs), 0)
51
- self.assertIn(kql_queryset_new, kqlq_names)
52
-
53
- kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_new)
54
- self.assertIsNotNone(kqlq.id)
55
- self.assertEqual(kqlq.display_name, kql_queryset_new)
56
-
57
- kqlq2 = fc.update_kql_queryset(workspace_id, kql_queryset_id=kqlq.id, display_name=f"{kql_queryset_new}2", return_item=True)
58
-
59
- kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
60
- self.assertEqual(kqlq.display_name, f"{kql_queryset_new}2")
61
- self.assertEqual(kqlq.id, kqlq2.id)
62
-
63
- status_code = fc.delete_kql_queryset(workspace_id, kqlq.id)
64
- self.assertEqual(status_code, 200)
@@ -1,56 +0,0 @@
1
- import unittest
2
- from datetime import datetime
3
- from dotenv import load_dotenv
4
- from time import sleep
5
- from msfabricpysdkcore.coreapi import FabricClientCore
6
-
7
- load_dotenv()
8
-
9
- class TestFabricClientCore(unittest.TestCase):
10
-
11
- def __init__(self, *args, **kwargs):
12
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
- #load_dotenv()
14
- self.fc = FabricClientCore()
15
-
16
- def test_kql_database(self):
17
-
18
- fc = self.fc
19
- workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
20
- evenhouse_id = "f30ba76a-92c3-40d3-ad69-36db059c113d"
21
-
22
- creation_payload = {"databaseType" : "ReadWrite",
23
- "parentEventhouseItemId" : evenhouse_id}
24
-
25
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
26
- kqldb_name = "kql" + datetime_str
27
- kqldb = fc.create_kql_database(workspace_id = workspace_id, display_name=kqldb_name,
28
- creation_payload=creation_payload)
29
- self.assertEqual(kqldb.display_name, kqldb_name)
30
-
31
- kql_databases = fc.list_kql_databases(workspace_id)
32
- kql_database_names = [kqldb.display_name for kqldb in kql_databases]
33
- self.assertGreater(len(kql_databases), 0)
34
- self.assertIn(kqldb_name, kql_database_names)
35
-
36
- kqldb = fc.get_kql_database(workspace_id, kql_database_name=kqldb_name)
37
- self.assertIsNotNone(kqldb.id)
38
- self.assertEqual(kqldb.display_name, kqldb_name)
39
-
40
- new_name = kqldb_name+"2"
41
- kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name=new_name, return_item=True)
42
-
43
- kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
44
- self.assertEqual(kqldb.display_name, new_name)
45
- self.assertEqual(kqldb.id, kqldb2.id)
46
-
47
- response = fc.update_kql_database_definition(workspace_id, kqldb.id, kqldb.definition)
48
- self.assertIn(response.status_code, [200, 202])
49
-
50
- definition = fc.get_kql_database_definition(workspace_id, kql_database_id=kqldb.id)
51
- self.assertIn("definition", definition)
52
- self.assertIn("parts", definition["definition"])
53
- self.assertGreaterEqual(len(definition["definition"]["parts"]), 3)
54
-
55
- status_code = fc.delete_kql_database(workspace_id, kqldb.id)
56
- self.assertEqual(status_code, 200)
@@ -1,93 +0,0 @@
1
- import unittest
2
- from datetime import datetime
3
- from dotenv import load_dotenv
4
- from time import sleep
5
- from msfabricpysdkcore.coreapi import FabricClientCore
6
-
7
- load_dotenv()
8
-
9
- class TestFabricClientCore(unittest.TestCase):
10
-
11
- def __init__(self, *args, **kwargs):
12
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
- #load_dotenv()
14
- self.fc = FabricClientCore()
15
-
16
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
17
- self.item_name = "testitem" + datetime_str
18
- self.item_type = "Notebook"
19
-
20
- def test_lakehouse(self):
21
-
22
- lakehouse2 = "lh2" + datetime.now().strftime("%Y%m%d%H%M%S")
23
- lakehouse3 = "lh3" + datetime.now().strftime("%Y%m%d%H%M%S")
24
-
25
- workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
26
- lhs = self.fc.list_lakehouses(workspace_id = workspace_id, with_properties=True)
27
- lh = [lh_ for lh_ in lhs if lh_.display_name == "lakelhousewlabels"][0]
28
- tables = lh.list_tables()
29
- table_names = [t["name"] for t in tables]
30
- self.assertIn("titanic2", table_names)
31
-
32
- lakehouse = self.fc.get_item(workspace_id=workspace_id, item_name="lakelhousewlabels", item_type="Lakehouse")
33
- self.assertIsNotNone(lakehouse.properties)
34
- lakehouse_id = lakehouse.id
35
- date_str = datetime.now().strftime("%Y%m%d%H%M%S")
36
- table_name = f"table{date_str}"
37
-
38
-
39
- status_code = self.fc.load_table(workspace_id=workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
40
- path_type="File", relative_path="Files/to_share/titanic2.csv")
41
-
42
- self.assertEqual(status_code, 202)
43
-
44
- # Run on demand table maintenance
45
- table_name_maintenance = "table20240515114529"
46
-
47
- execution_data = {
48
- "tableName": table_name_maintenance,
49
- "optimizeSettings": {
50
- "vOrder": True,
51
- "zOrderBy": [
52
- "tipAmount"
53
- ]
54
- },
55
- "vacuumSettings": {
56
- "retentionPeriod": "7:01:00:00"
57
- }
58
- }
59
-
60
- response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
61
- execution_data = execution_data,
62
- job_type = "TableMaintenance", wait_for_completion = False)
63
- self.assertIn(response.status_code, [200, 202])
64
-
65
- table_list = self.fc.list_tables(workspace_id=workspace_id, lakehouse_id=lakehouse_id)
66
- table_names = [table["name"] for table in table_list]
67
-
68
- self.assertIn(table_name, table_names)
69
-
70
- fc = self.fc
71
-
72
- lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name=lakehouse2)
73
- self.assertIsNotNone(lakehouse.id)
74
-
75
- lakehouses = fc.list_lakehouses(workspace_id)
76
- lakehouse_names = [lh.display_name for lh in lakehouses]
77
- self.assertGreater(len(lakehouse_names), 0)
78
- self.assertIn(lakehouse2, lakehouse_names)
79
-
80
- lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
81
- self.assertEqual(lakehouse.id, lakehouse2.id)
82
-
83
- sleep(20)
84
- lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name=lakehouse3, return_item=True)
85
- self.assertEqual(lakehouse2.display_name, lakehouse3)
86
-
87
- id = lakehouse2.id
88
-
89
- lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_name=lakehouse3)
90
- self.assertEqual(lakehouse2.id, id)
91
-
92
- status_code = fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
93
- self.assertEqual(status_code, 200)
@@ -1,61 +0,0 @@
1
- import unittest
2
- from datetime import datetime
3
- from dotenv import load_dotenv
4
- from time import sleep
5
- from msfabricpysdkcore.coreapi import FabricClientCore
6
-
7
- load_dotenv()
8
-
9
- class TestFabricClientCore(unittest.TestCase):
10
-
11
- def __init__(self, *args, **kwargs):
12
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
- #load_dotenv()
14
- self.fc = FabricClientCore()
15
-
16
- def test_workspace_managed_private_endpoints(self):
17
-
18
- fc = self.fc
19
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
20
-
21
- mpes = fc.list_workspace_managed_private_endpoints(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb')
22
-
23
- if len(mpes) > 0:
24
-
25
- for mpe in mpes:
26
- status_code = fc.delete_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
27
- managed_private_endpoint_id=mpe["id"])
28
- self.assertEqual(status_code, 200)
29
- sleep(60)
30
-
31
- mpe = fc.create_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
32
- name = f'testmpe{datetime_str}',
33
- target_private_link_resource_id = '/subscriptions/c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8/resourceGroups/fabricdemo/providers/Microsoft.Storage/storageAccounts/publicfabricdemo9039',
34
- target_subresource_type = 'dfs',
35
- request_message = 'testmessage')
36
-
37
- mpes = fc.list_workspace_managed_private_endpoints(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb')
38
-
39
- self.assertIsNotNone(mpes)
40
- self.assertGreater(len(mpes), 0)
41
-
42
- mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
43
- managed_private_endpoint_id=mpe["id"])
44
-
45
- self.assertEqual(mpe2["id"], mpe["id"])
46
-
47
- self.assertIsNotNone(mpe2["connectionState"])
48
- self.assertIn("targetPrivateLinkResourceId", mpe2)
49
- self.assertEqual(mpe2["targetPrivateLinkResourceId"], "/subscriptions/c77cc8fc-43bb-4d44-bdc5-6e20511ed2a8/resourceGroups/fabricdemo/providers/Microsoft.Storage/storageAccounts/publicfabricdemo9039")
50
-
51
- for _ in range(0, 20):
52
- if mpe2["connectionState"]["status"] != "Pending":
53
- sleep(30)
54
- else:
55
- status_code = fc.delete_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
56
- managed_private_endpoint_id=mpe["id"])
57
- self.assertEqual(status_code, 200)
58
- break
59
- mpe2 = fc.get_workspace_managed_private_endpoint(workspace_id='535910f6-abe1-499d-94ad-f698c28805fb',
60
- managed_private_endpoint_id=mpe["id"])
61
-
@@ -1,80 +0,0 @@
1
- import unittest
2
- from datetime import datetime
3
- from dotenv import load_dotenv
4
- from time import sleep
5
- from msfabricpysdkcore.coreapi import FabricClientCore
6
-
7
- load_dotenv()
8
-
9
- class TestFabricClientCore(unittest.TestCase):
10
-
11
- def __init__(self, *args, **kwargs):
12
- super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
- #load_dotenv()
14
- self.fc = FabricClientCore()
15
-
16
- def test_mirrored_database(self):
17
-
18
- fc = self.fc
19
- workspace_id = '46425c13-5736-4285-972c-6d034020f3ff'
20
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
21
-
22
- mirrored_db_name = "mirrored_db" + datetime_str
23
-
24
- # mirrored_db_w_content = fc.get_mirrored_database(workspace_id, mirrored_database_name="dfsdemo")
25
-
26
- # status = fc.get_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
27
- # self.assertIsNotNone(status)
28
- # self.assertIn("status", status)
29
-
30
- # status = status["status"]
31
-
32
- # if status == 'Running':
33
- # fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
34
- # sleep(60)
35
- # fc.start_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
36
- # else:
37
- # fc.start_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
38
- # sleep(60)
39
- # fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
40
-
41
- # table_status = fc.get_tables_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
42
-
43
- # self.assertIsNotNone(table_status)
44
- # self.assertIn("data", table_status)
45
- # for _ in range(5):
46
- # if len(table_status["data"]) > 0:
47
- # break
48
- # sleep(60)
49
- # table_status = fc.get_tables_mirroring_status(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
50
- # self.assertIn("sourceTableName", table_status["data"][0])
51
-
52
- # fc.stop_mirroring(workspace_id=workspace_id, mirrored_database_id=mirrored_db_w_content.id)
53
-
54
- # definition = fc.get_mirrored_database_definition(workspace_id, mirrored_db_w_content.id)
55
- # self.assertIsNotNone(definition)
56
- # self.assertIn("definition", definition)
57
- # self.assertIn("parts", definition["definition"])
58
-
59
- # mirrored_db = fc.create_mirrored_database(workspace_id, display_name=mirrored_db_name)
60
-
61
- # mirrored_db_check = fc.get_mirrored_database(workspace_id, mirrored_database_id=mirrored_db.id)
62
- # self.assertEqual(mirrored_db_check.display_name, mirrored_db_name)
63
- # self.assertIsNotNone(mirrored_db_check.id)
64
- # self.assertEqual(mirrored_db_check.id, mirrored_db_check.id)
65
-
66
- # mirrored_dbs = fc.list_mirrored_databases(workspace_id)
67
- # mirrored_db_names = [md.display_name for md in mirrored_dbs]
68
- # self.assertGreater(len(mirrored_dbs), 0)
69
- # self.assertIn(mirrored_db_name, mirrored_db_names)
70
-
71
- # sleep(60)
72
-
73
- # mirrored_db_2 = fc.update_mirrored_database(workspace_id, mirrored_db_check.id,
74
- # display_name=f"u{mirrored_db_name}", return_item=True)
75
- # mirrored_db_2 = fc.get_mirrored_database(workspace_id, mirrored_database_id=mirrored_db_2.id)
76
-
77
- # self.assertEqual(mirrored_db_2.display_name, f"u{mirrored_db_name}")
78
-
79
- # status_code = fc.delete_mirrored_database(workspace_id, mirrored_db_2.id)
80
- # self.assertEqual(status_code, 200)