msfabricpysdkcore 0.0.9__py3-none-any.whl → 0.0.11__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to its public registry. It is provided for informational purposes only.
Files changed (33)
  1. msfabricpysdkcore/admin_item.py +7 -0
  2. msfabricpysdkcore/admin_workspace.py +20 -1
  3. msfabricpysdkcore/adminapi.py +133 -7
  4. msfabricpysdkcore/auth.py +9 -6
  5. msfabricpysdkcore/client.py +5 -4
  6. msfabricpysdkcore/coreapi.py +341 -17
  7. msfabricpysdkcore/deployment_pipeline.py +240 -0
  8. msfabricpysdkcore/environment.py +209 -0
  9. msfabricpysdkcore/item.py +12 -11
  10. msfabricpysdkcore/lakehouse.py +42 -1
  11. msfabricpysdkcore/long_running_operation.py +2 -6
  12. msfabricpysdkcore/otheritems.py +122 -3
  13. msfabricpysdkcore/spark_custom_pool.py +118 -0
  14. msfabricpysdkcore/tests/test_admin_apis.py +20 -9
  15. msfabricpysdkcore/tests/test_datapipelines.py +48 -0
  16. msfabricpysdkcore/tests/test_deployment_pipeline.py +64 -0
  17. msfabricpysdkcore/tests/test_domains.py +3 -2
  18. msfabricpysdkcore/tests/test_environments.py +65 -0
  19. msfabricpysdkcore/tests/test_evenstreams.py +44 -0
  20. msfabricpysdkcore/tests/test_git.py +3 -1
  21. msfabricpysdkcore/tests/test_items_incl_lakehouse.py +81 -109
  22. msfabricpysdkcore/tests/test_jobs.py +4 -0
  23. msfabricpysdkcore/tests/test_kqldatabases.py +48 -0
  24. msfabricpysdkcore/tests/test_shortcuts.py +3 -1
  25. msfabricpysdkcore/tests/test_spark.py +91 -0
  26. msfabricpysdkcore/tests/test_workspaces_capacities.py +6 -5
  27. msfabricpysdkcore/workspace.py +358 -32
  28. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/METADATA +82 -32
  29. msfabricpysdkcore-0.0.11.dist-info/RECORD +38 -0
  30. msfabricpysdkcore-0.0.9.dist-info/RECORD +0 -29
  31. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/LICENSE +0 -0
  32. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/WHEEL +0 -0
  33. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/top_level.txt +0 -0
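
The sections below show the updated test suite, which now loads credentials via python-dotenv and exercises the renamed and newly added endpoints: the admin client's get_* readers became list_* (list_workspace_access_details, list_access_entities, list_tenant_settings, list_capacities_tenant_settings_overrides, list_item_access_details), and bulk sensitivity-label calls were added. A minimal usage sketch based on the calls exercised in these tests, assuming an authenticated FabricClientAdmin; all IDs are placeholders:

    from msfabricpysdkcore import FabricClientAdmin

    fca = FabricClientAdmin()
    workspace_id = "<workspace-guid>"  # placeholder
    user_id = "<user-guid>"            # placeholder

    # renamed from get_* (0.0.9) to list_* (0.0.11)
    ws_access = fca.list_workspace_access_details(workspace_id)
    entities = fca.list_access_entities(user_id, type="Notebook")
    tenant_settings = fca.list_tenant_settings()
    overrides = fca.list_capacities_tenant_settings_overrides()

    # new bulk sensitivity-label operations
    items = [{"id": "<item-guid>", "type": "Lakehouse"}]       # placeholder item
    fca.bulk_set_labels(items=items, label_id="<label-guid>")  # placeholder label
    fca.bulk_remove_labels(items=items)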
msfabricpysdkcore/tests/test_admin_apis.py
@@ -1,16 +1,17 @@
 import unittest
-#from dotenv import load_dotenv
+from dotenv import load_dotenv
 from msfabricpysdkcore import FabricClientAdmin
 
+load_dotenv()
 
 class TestFabricClientCore(unittest.TestCase):
 
     def __init__(self, *args, **kwargs):
         super(TestFabricClientCore, self).__init__(*args, **kwargs)
-
+        self.fca = FabricClientAdmin()
 
-    def test_domains(self):
-        fca = FabricClientAdmin()
+    def test_admin_api(self):
+        fca = self.fca
 
         user_id = 'b4f4e299-e6e1-4667-886c-57e4a8dde1c2'
 
@@ -26,24 +27,24 @@ class TestFabricClientCore(unittest.TestCase):
 
         # Get workspace access details
 
-        ws_access = fca.get_workspace_access_details(ws.id)
+        ws_access = fca.list_workspace_access_details(ws.id)
         principials = ws_access["accessDetails"]
         principials_ids = [p["principal"]["id"] for p in principials]
         self.assertIn(user_id, principials_ids)
 
         # Get access entities
 
-        access_entities = fca.get_access_entities(user_id, type="Notebook")
+        access_entities = fca.list_access_entities(user_id, type="Notebook")
         self.assertGreater(len(access_entities), 0)
 
         # Get tenant settings
 
-        tenant_settings = fca.get_tenant_settings()
+        tenant_settings = fca.list_tenant_settings()
         self.assertGreater(len(tenant_settings["tenantSettings"]), 0)
 
         # Get capacity tenant settings overrides
 
-        overrides = fca.get_capacities_tenant_settings_overrides()
+        overrides = fca.list_capacities_tenant_settings_overrides()
         self.assertGreater(len(overrides), -1)
 
         # List items
@@ -58,7 +59,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         # Get item access details
 
-        item_access = fca.get_item_access_details(workspace_id=ws.id, item_id=item_list[0].id)
+        item_access = fca.list_item_access_details(workspace_id=ws.id, item_id=item_list[0].id)
         principials = item_access["accessDetails"]
 
         principials_ids = [p["principal"]["id"] for p in principials]
@@ -66,3 +67,13 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertIn(user_id, principials_ids)
 
 
+    def test_labels(self):
+
+        fca = self.fca
+
+        items = [{"id": "d417b834-d381-454c-9cf0-c491f69508de", "type": "Lakehouse"}]
+        label_id = "defa4170-0d19-0005-000a-bc88714345d2"
+        resp = fca.bulk_set_labels(items=items, label_id=label_id)
+        self.assertEqual(resp["itemsChangeLabelStatus"][0]["status"], "Succeeded")
+        resp = fca.bulk_remove_labels(items=items)
+        self.assertEqual(resp["itemsChangeLabelStatus"][0]["status"], "Succeeded")
msfabricpysdkcore/tests/test_datapipelines.py
@@ -0,0 +1,48 @@
+import unittest
+from datetime import datetime
+from dotenv import load_dotenv
+from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        #load_dotenv()
+        self.fc = FabricClientCore()
+
+    def test_data_pipelines(self):
+
+        fc = self.fc
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
+
+        dps = fc.list_data_pipelines(workspace_id)
+        dp_names = [dp.display_name for dp in dps]
+        self.assertGreater(len(dps), 0)
+        self.assertIn("pipeline1", dp_names)
+
+        dp = fc.get_data_pipeline(workspace_id, data_pipeline_name="pipeline1")
+        self.assertIsNotNone(dp.id)
+        self.assertIsNotNone(dp.definition)
+        self.assertEqual(dp.display_name, "pipeline1")
+
+        dp_new = fc.create_data_pipeline(workspace_id, display_name="pipeline_new", description="asda")
+        dp_new.update_definition(dp.definition)
+
+        self.assertEqual(dp_new.display_name, "pipeline_new")
+
+        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2")
+
+        dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
+        self.assertEqual(dp.display_name, "pipeline2")
+        self.assertEqual(dp.id, dp2.id)
+
+        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline1")
+
+        dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
+        self.assertEqual(dp.display_name, "pipeline1")
+        self.assertEqual(dp.id, dp2.id)
+        status_code = fc.delete_data_pipeline(workspace_id, dp_new.id)
+        self.assertEqual(status_code, 200)
msfabricpysdkcore/tests/test_deployment_pipeline.py
@@ -0,0 +1,64 @@
+import unittest
+from msfabricpysdkcore.coreapi import FabricClientCore
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fc = FabricClientCore()
+
+
+
+    def test_spark_workspace_custom_pools(self):
+        fc = self.fc
+
+        dep_pipes = fc.list_deployment_pipelines()
+
+        self.assertGreater(len(dep_pipes), 0)
+
+        self.assertIn("sdkpipe", [pipe.display_name for pipe in dep_pipes])
+
+        for pipe in dep_pipes:
+            if pipe.display_name == 'sdkpipe':
+                pipe_id = pipe.id
+                break
+
+        pipe = fc.get_deployment_pipeline(pipe_id)
+
+        self.assertEqual(pipe.display_name, 'sdkpipe')
+        self.assertEqual(pipe.id, pipe_id)
+
+        stages = fc.get_deployment_pipeline_stages(pipe_id)
+
+        self.assertGreater(len(stages), 0)
+        names = [stage.display_name for stage in stages]
+        self.assertIn("Development", names)
+        self.assertIn("Production", names)
+
+        dev_stage = [stage for stage in stages if stage.display_name == "Development"][0]
+        prod_stage = [stage for stage in stages if stage.display_name == "Production"][0]
+
+        items = fc.get_deployment_pipeline_stages_items(pipeline_id=pipe_id, stage_id=dev_stage.id)
+
+        self.assertGreater(len(items), 0)
+        self.assertIn("cicdlakehouse", [item["itemDisplayName"] for item in items])
+
+        items = [item for item in dev_stage.get_items() if item["itemDisplayName"] == 'cicdlakehouse']
+        item = items[0]
+        item = {"sourceItemId": item["itemId"],
+                "itemType": item["itemType"]}
+        items = [item]
+
+
+        response = pipe.deploy(source_stage_id=dev_stage.id, target_stage_id=prod_stage.id, items=items)
+
+        self.assertEqual(response["status"], "Succeeded")
+
+
+
+
+
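
Pulled out of the unittest harness, the deployment flow exercised above looks roughly like this; the pipeline name "sdkpipe", the stage names, and the item name "cicdlakehouse" are the test fixtures and would differ in another tenant:

    from msfabricpysdkcore.coreapi import FabricClientCore

    fc = FabricClientCore()

    # find the pipeline by display name, then fetch it together with its stages
    pipes = fc.list_deployment_pipelines()
    pipe_id = next(p.id for p in pipes if p.display_name == "sdkpipe")
    pipe = fc.get_deployment_pipeline(pipe_id)

    stages = fc.get_deployment_pipeline_stages(pipe_id)
    dev = next(s for s in stages if s.display_name == "Development")
    prod = next(s for s in stages if s.display_name == "Production")

    # deploy a single item from Development to Production
    items = [{"sourceItemId": i["itemId"], "itemType": i["itemType"]}
             for i in dev.get_items() if i["itemDisplayName"] == "cicdlakehouse"]
    response = pipe.deploy(source_stage_id=dev.id, target_stage_id=prod.id, items=items)
    assert response["status"] == "Succeeded"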
msfabricpysdkcore/tests/test_domains.py
@@ -1,9 +1,10 @@
 import unittest
-#from dotenv import load_dotenv
-from datetime import datetime
+from dotenv import load_dotenv
 from msfabricpysdkcore import FabricClientCore, FabricClientAdmin
 
 
+load_dotenv()
+
 class TestFabricClientCore(unittest.TestCase):
 
     def __init__(self, *args, **kwargs):
msfabricpysdkcore/tests/test_environments.py
@@ -0,0 +1,65 @@
+import unittest
+from datetime import datetime
+from dotenv import load_dotenv
+from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fc = FabricClientCore()
+
+    def test_environments_crudl(self):
+        fc = self.fc
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
+        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+
+        env_name = "env" + datetime_str
+        environment1 = fc.create_environment(workspace_id, display_name=env_name)
+        self.assertEqual(environment1.display_name, env_name)
+
+        environments = fc.list_environments(workspace_id)
+        environment_names = [env.display_name for env in environments]
+        self.assertGreater(len(environments), 0)
+        self.assertIn(env_name, environment_names)
+
+        env = fc.get_environment(workspace_id, environment_name=env_name)
+        self.assertIsNotNone(env.id)
+        self.assertEqual(env.display_name, env_name)
+        new_name = env_name + "2"
+        env2 = fc.update_environment(workspace_id, env.id, display_name=new_name)
+
+        env = fc.get_environment(workspace_id, environment_id=env.id)
+        self.assertEqual(env.display_name, new_name)
+        self.assertEqual(env.id, env2.id)
+
+        status_code = fc.delete_environment(workspace_id, env.id)
+        self.assertEqual(status_code, 200)
+
+    def test_environment_details(self):
+        fc = FabricClientCore()
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
+        environment_id = 'fae6d1a7-d671-4091-89b1-f42626deb56f'
+        published_settings = fc.get_published_settings(workspace_id=workspace_id, environment_id=environment_id)
+        self.assertIsNotNone(published_settings)
+        self.assertIn("instancePool", published_settings)
+        self.assertIn("dynamicExecutorAllocation", published_settings)
+        staging_settings = fc.get_staging_settings(workspace_id=workspace_id, environment_id=environment_id)
+        self.assertIsNotNone(staging_settings)
+        self.assertIn("instancePool", staging_settings)
+        self.assertIn("dynamicExecutorAllocation", staging_settings)
+        if staging_settings["driverCores"] == 8:
+            driver_cores = 4
+        else:
+            driver_cores = 8
+        updated_settings = fc.update_staging_settings(workspace_id=workspace_id, environment_id=environment_id, driver_cores=driver_cores)
+        self.assertIn("instancePool", updated_settings)
+        self.assertIn("dynamicExecutorAllocation", updated_settings)
+        self.assertEqual(updated_settings["driverCores"], driver_cores)
+        updated_settings = fc.get_staging_settings(workspace_id=workspace_id, environment_id=environment_id)
+        self.assertIn("instancePool", updated_settings)
+        self.assertIn("dynamicExecutorAllocation", updated_settings)
+        self.assertEqual(updated_settings["driverCores"], driver_cores)
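
The second test above reads an environment's Spark settings and then updates the staging settings. As a standalone sketch, with the workspace and environment IDs as placeholders:

    from msfabricpysdkcore.coreapi import FabricClientCore

    fc = FabricClientCore()
    workspace_id = "<workspace-guid>"      # placeholder
    environment_id = "<environment-guid>"  # placeholder

    # published and staging settings are exposed separately;
    # both include instancePool and dynamicExecutorAllocation
    published = fc.get_published_settings(workspace_id=workspace_id, environment_id=environment_id)
    staging = fc.get_staging_settings(workspace_id=workspace_id, environment_id=environment_id)

    updated = fc.update_staging_settings(workspace_id=workspace_id,
                                         environment_id=environment_id,
                                         driver_cores=8)
    print(updated["driverCores"])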
msfabricpysdkcore/tests/test_evenstreams.py
@@ -0,0 +1,44 @@
+import unittest
+from datetime import datetime
+from dotenv import load_dotenv
+from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        #load_dotenv()
+        self.fc = FabricClientCore()
+
+    def test_eventstreams(self):
+
+        fc = self.fc
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
+
+        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        es_name = "es" + datetime_str
+
+        eventstream = fc.create_eventstream(workspace_id, display_name=es_name)
+        self.assertEqual(eventstream.display_name, es_name)
+
+        eventstreams = fc.list_eventstreams(workspace_id)
+        eventstream_names = [es.display_name for es in eventstreams]
+        self.assertGreater(len(eventstreams), 0)
+        self.assertIn(es_name, eventstream_names)
+
+
+        es = fc.get_eventstream(workspace_id, eventstream_name=es_name)
+        self.assertIsNotNone(es.id)
+        self.assertEqual(es.display_name, es_name)
+
+        es2 = fc.update_eventstream(workspace_id, es.id, display_name=f"{es_name}2")
+
+        es = fc.get_eventstream(workspace_id, eventstream_id=es.id)
+        self.assertEqual(es.display_name, f"{es_name}2")
+        self.assertEqual(es.id, es2.id)
+
+        status_code = fc.delete_eventstream(workspace_id, es.id)
+        self.assertEqual(status_code, 200)
msfabricpysdkcore/tests/test_git.py
@@ -1,7 +1,9 @@
 import unittest
 from msfabricpysdkcore.coreapi import FabricClientCore
 from datetime import datetime
-#from dotenv import load_dotenv
+from dotenv import load_dotenv
+
+load_dotenv()
 
 class TestFabricClientCore(unittest.TestCase):
 
msfabricpysdkcore/tests/test_items_incl_lakehouse.py
@@ -1,9 +1,11 @@
 import unittest
 from datetime import datetime
-#from dotenv import load_dotenv
+from dotenv import load_dotenv
 from time import sleep
 from msfabricpysdkcore.coreapi import FabricClientCore
 
+load_dotenv()
+
 class TestFabricClientCore(unittest.TestCase):
 
     def __init__(self, *args, **kwargs):
@@ -76,8 +78,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         fc = self.fc
 
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         list_dashboards = fc.list_dashboards(workspace_id)
         dashboard_names = [dashboard.display_name for dashboard in list_dashboards]
@@ -104,24 +105,45 @@ class TestFabricClientCore(unittest.TestCase):
 
         lakehouse = self.fc.get_item(workspace_id=self.workspace_id, item_name="lakehouse1", item_type="Lakehouse")
         self.assertIsNotNone(lakehouse.properties)
-        item_id = lakehouse.id
+        lakehouse_id = lakehouse.id
+        workspace_id = self.workspace_id
         date_str = datetime.now().strftime("%Y%m%d%H%M%S")
         table_name = f"table{date_str}"
 
 
-        status_code = self.fc.load_table(workspace_id=self.workspace_id, item_id=item_id, table_name=table_name,
+        status_code = self.fc.load_table(workspace_id=self.workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
                                          path_type="File", relative_path="Files/folder1/titanic.csv")
 
         self.assertEqual(status_code, 202)
 
-        table_list = self.fc.list_tables(workspace_id=self.workspace_id, item_id=item_id)
+        # Run on demand table maintenance
+        table_name_maintenance = "table20240515114529"
+
+        execution_data = {
+            "tableName": table_name_maintenance,
+            "optimizeSettings": {
+                "vOrder": True,
+                "zOrderBy": [
+                    "tipAmount"
+                ]
+            },
+            "vacuumSettings": {
+                "retentionPeriod": "7:01:00:00"
+            }
+        }
+
+        response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
+                                                           execution_data = execution_data,
+                                                           job_type = "TableMaintenance", wait_for_completion = True)
+        self.assertIn(response.status_code, [200, 202])
+
+        table_list = self.fc.list_tables(workspace_id=self.workspace_id, lakehouse_id=lakehouse_id)
         table_names = [table["name"] for table in table_list]
 
         self.assertIn(table_name, table_names)
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name="lakehouse2")
         self.assertIsNotNone(lakehouse.id)
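
The on-demand table maintenance job added in this hunk, outside the test class; the IDs and table name are placeholders, and the payload shape is the one used in the test:

    from msfabricpysdkcore.coreapi import FabricClientCore

    fc = FabricClientCore()
    workspace_id = "<workspace-guid>"  # placeholder
    lakehouse_id = "<lakehouse-guid>"  # placeholder

    execution_data = {
        "tableName": "mytable",  # placeholder table
        "optimizeSettings": {"vOrder": True, "zOrderBy": ["tipAmount"]},
        "vacuumSettings": {"retentionPeriod": "7:01:00:00"},
    }
    response = fc.run_on_demand_table_maintenance(
        workspace_id=workspace_id, lakehouse_id=lakehouse_id,
        execution_data=execution_data, job_type="TableMaintenance",
        wait_for_completion=True)
    # the test accepts either 200 or 202 from this call
    print(response.status_code)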
@@ -147,101 +169,37 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertEqual(status_code, 200)
 
 
-    def test_data_pipelines(self):
-
-        fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
-
-        dps = fc.list_data_pipelines(workspace_id)
-        dp_names = [dp.display_name for dp in dps]
-        self.assertGreater(len(dps), 0)
-        self.assertIn("pipeline1", dp_names)
-
-        dp = fc.get_data_pipeline(workspace_id, data_pipeline_name="pipeline1")
-        self.assertIsNotNone(dp.id)
-        self.assertIsNotNone(dp.definition)
-        self.assertEqual(dp.display_name, "pipeline1")
-
-        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2")
-
-        dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
-        self.assertEqual(dp.display_name, "pipeline2")
-        self.assertEqual(dp.id, dp2.id)
-
-        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline1")
-
-        dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
-        self.assertEqual(dp.display_name, "pipeline1")
-        self.assertEqual(dp.id, dp2.id)
-        # status_code = fc.delete_data_pipeline(workspace_id, dp.id)
-        # self.assertEqual(status_code, 200)
-
-    def test_eventstreams(self):
-
+    def test_eventhouses(self):
+
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        es_name = "es" + datetime_str
-
-        eventstream = fc.create_eventstream(workspace_id, display_name=es_name)
-        self.assertEqual(eventstream.display_name, es_name)
-
-        eventstreams = fc.list_eventstreams(workspace_id)
-        eventstream_names = [es.display_name for es in eventstreams]
-        self.assertGreater(len(eventstreams), 0)
-        self.assertIn(es_name, eventstream_names)
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
+        eventhouse1 = fc.create_eventhouse(workspace_id, display_name="eventhouse1")
+        self.assertEqual(eventhouse1.display_name, "eventhouse1")
 
-        es = fc.get_eventstream(workspace_id, eventstream_name=es_name)
-        self.assertIsNotNone(es.id)
-        self.assertEqual(es.display_name, es_name)
+        eventhouses = fc.list_eventhouses(workspace_id)
+        eventhouse_names = [eh.display_name for eh in eventhouses]
+        self.assertGreater(len(eventhouses), 0)
+        self.assertIn("eventhouse1", eventhouse_names)
 
-        es2 = fc.update_eventstream(workspace_id, es.id, display_name=f"{es_name}2")
+        eh = fc.get_eventhouse(workspace_id, eventhouse_name="eventhouse1")
+        self.assertIsNotNone(eh.id)
+        self.assertEqual(eh.display_name, "eventhouse1")
 
-        es = fc.get_eventstream(workspace_id, eventstream_id=es.id)
-        self.assertEqual(es.display_name, f"{es_name}2")
-        self.assertEqual(es.id, es2.id)
+        eh2 = fc.update_eventhouse(workspace_id, eh.id, display_name="eventhouse2")
 
-        status_code = fc.delete_eventstream(workspace_id, es.id)
-        self.assertEqual(status_code, 200)
-
-    def test_kql_database(self):
-
-        fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
-
-        kql_databases = fc.list_kql_databases(workspace_id)
-        kql_database_names = [kqldb.display_name for kqldb in kql_databases]
-        self.assertGreater(len(kql_databases), 0)
-        self.assertIn("kqldatabase1", kql_database_names)
+        eh = fc.get_eventhouse(workspace_id, eventhouse_id=eh.id)
+        self.assertEqual(eh.display_name, "eventhouse2")
+        self.assertEqual(eh.id, eh2.id)
 
-        kqldb = fc.get_kql_database(workspace_id, kql_database_name="kqldatabase1")
-        self.assertIsNotNone(kqldb.id)
-        self.assertEqual(kqldb.display_name, "kqldatabase1")
-
-        kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldb2")
-
-        kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
-        self.assertEqual(kqldb.display_name, "kqldb2")
-        self.assertEqual(kqldb.id, kqldb2.id)
-
-        kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldatabase1")
+        status_code = fc.delete_eventhouse(workspace_id, eh.id)
+        self.assertEqual(status_code, 200)
 
-        kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
-        self.assertEqual(kqldb.display_name, "kqldatabase1")
-        self.assertEqual(kqldb.id, kqldb2.id)
-        # status_code = fc.delete_kql_database(workspace_id, kqldb.id)
-        # self.assertEqual(status_code, 200)
 
     def test_kql_querysets(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         kql_queryset_name = "kqlqueryset1"
 
@@ -273,8 +231,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_ml_experiments(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         ml_experiment = fc.create_ml_experiment(workspace_id, display_name="mlexperiment1")
         self.assertEqual(ml_experiment.display_name, "mlexperiment1")
@@ -300,8 +257,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_ml_models(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         model_name = "mlm" + datetime_str
 
@@ -329,14 +285,17 @@ class TestFabricClientCore(unittest.TestCase):
     def test_notebooks(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         notebook_name = "notebook125"
 
         notebook_w_content = fc.get_notebook(workspace_id, notebook_name="HelloWorld")
-        definition = notebook_w_content.definition
 
+        definition = fc.get_notebook_definition(workspace_id, notebook_w_content.id)
+
+        self.assertIsNotNone(definition)
+        self.assertIn("definition", definition)
+        definition = definition["definition"]
         notebook = fc.create_notebook(workspace_id, definition=definition, display_name=notebook_name)
         fc.update_notebook_definition(workspace_id, notebook.id, definition=definition)
         notebook = fc.get_notebook(workspace_id, notebook_id=notebook.id)
@@ -364,13 +323,18 @@ class TestFabricClientCore(unittest.TestCase):
    def test_reports(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         report_name = "report1234"
 
         report_w_content = fc.get_report(workspace_id, report_name="HelloWorldReport")
-        definition = report_w_content.definition
+
+        definition = fc.get_report_definition(workspace_id, report_w_content.id)
+
+        self.assertIsNotNone(definition)
+        self.assertIn("definition", definition)
+        definition = definition["definition"]
+
         report = fc.create_report(workspace_id, display_name=report_name, definition=definition)
         fc.update_report_definition(workspace_id, report.id, definition=definition)
         report = fc.get_report(workspace_id, report_id=report.id)
@@ -392,13 +356,17 @@ class TestFabricClientCore(unittest.TestCase):
     def test_semantic_models(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         semantic_model_name = "semanticmodel1234"
 
         semantic_model_w_content = fc.get_semantic_model(workspace_id, semantic_model_name="Table")
-        definition = semantic_model_w_content.definition
+
+        definition = fc.get_semantic_model_definition(workspace_id, semantic_model_w_content.id)
+
+        self.assertIsNotNone(definition)
+        self.assertIn("definition", definition)
+        definition = definition["definition"]
         semantic_model = fc.create_semantic_model(workspace_id, display_name=semantic_model_name, definition=definition)
         fc.update_semantic_model_definition(workspace_id, semantic_model.id, definition=definition)
         semantic_model = fc.get_semantic_model(workspace_id, semantic_model_id=semantic_model.id)
@@ -421,14 +389,19 @@ class TestFabricClientCore(unittest.TestCase):
     def test_spark_job_definitions(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         spark_job_definition_name = f"sjd{datetime_str}"
 
         spark_job_definition_w_content = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")
-        definition = spark_job_definition_w_content.definition
+
+        definition = fc.get_spark_job_definition_definition(workspace_id, spark_job_definition_w_content.id)
+
+        self.assertIsNotNone(definition)
+        self.assertIn("definition", definition)
+        definition = definition["definition"]
+
         spark_job_definition = fc.create_spark_job_definition(workspace_id, display_name=spark_job_definition_name)
         fc.update_spark_job_definition_definition(workspace_id, spark_job_definition.id, definition=definition)
         spark_job_definition = fc.get_spark_job_definition(workspace_id, spark_job_definition_id=spark_job_definition.id)
@@ -454,8 +427,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_warehouses(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         warehouse1 = f"wh{datetime_str}"
msfabricpysdkcore/tests/test_jobs.py
@@ -1,5 +1,9 @@
 import unittest
 from msfabricpysdkcore.coreapi import FabricClientCore
+from dotenv import load_dotenv
+
+load_dotenv()
+
 
 class TestFabricClientCore(unittest.TestCase):
 