msfabricpysdkcore 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- msfabricpysdkcore/adminapi.py +4 -2
- msfabricpysdkcore/auth.py +28 -0
- msfabricpysdkcore/client.py +16 -7
- msfabricpysdkcore/coreapi.py +5 -2
- msfabricpysdkcore/fabric_azure_client.py +6 -3
- {msfabricpysdkcore-0.1.3.dist-info → msfabricpysdkcore-0.1.5.dist-info}/METADATA +6 -2
- msfabricpysdkcore-0.1.5.dist-info/RECORD +28 -0
- {msfabricpysdkcore-0.1.3.dist-info → msfabricpysdkcore-0.1.5.dist-info}/WHEEL +1 -1
- msfabricpysdkcore/tests/__init__.py +0 -0
- msfabricpysdkcore/tests/test_admin_apis.py +0 -98
- msfabricpysdkcore/tests/test_datapipelines.py +0 -45
- msfabricpysdkcore/tests/test_deployment_pipeline.py +0 -63
- msfabricpysdkcore/tests/test_domains.py +0 -126
- msfabricpysdkcore/tests/test_environments.py +0 -114
- msfabricpysdkcore/tests/test_evenhouses.py +0 -47
- msfabricpysdkcore/tests/test_evenstreams.py +0 -44
- msfabricpysdkcore/tests/test_external_data_shares.py +0 -51
- msfabricpysdkcore/tests/test_fabric_azure_client.py +0 -78
- msfabricpysdkcore/tests/test_git.py +0 -63
- msfabricpysdkcore/tests/test_items.py +0 -81
- msfabricpysdkcore/tests/test_items_incl_lakehouse.py +0 -418
- msfabricpysdkcore/tests/test_jobs.py +0 -43
- msfabricpysdkcore/tests/test_kql_queryset.py +0 -49
- msfabricpysdkcore/tests/test_kqldatabases.py +0 -48
- msfabricpysdkcore/tests/test_lakehouse.py +0 -84
- msfabricpysdkcore/tests/test_ml_experiments.py +0 -47
- msfabricpysdkcore/tests/test_ml_models.py +0 -47
- msfabricpysdkcore/tests/test_notebooks.py +0 -57
- msfabricpysdkcore/tests/test_one_lake_data_access_security.py +0 -63
- msfabricpysdkcore/tests/test_other_items.py +0 -45
- msfabricpysdkcore/tests/test_reports.py +0 -52
- msfabricpysdkcore/tests/test_semantic_model.py +0 -50
- msfabricpysdkcore/tests/test_shortcuts.py +0 -55
- msfabricpysdkcore/tests/test_spark.py +0 -91
- msfabricpysdkcore/tests/test_sparkjobdefinition.py +0 -55
- msfabricpysdkcore/tests/test_warehouses.py +0 -50
- msfabricpysdkcore/tests/test_workspaces_capacities.py +0 -159
- msfabricpysdkcore-0.1.3.dist-info/RECORD +0 -57
- {msfabricpysdkcore-0.1.3.dist-info → msfabricpysdkcore-0.1.5.dist-info}/LICENSE +0 -0
- {msfabricpysdkcore-0.1.3.dist-info → msfabricpysdkcore-0.1.5.dist-info}/top_level.txt +0 -0
msfabricpysdkcore/tests/test_items_incl_lakehouse.py (removed in 0.1.5)
@@ -1,418 +0,0 @@
-import unittest
-from datetime import datetime
-from dotenv import load_dotenv
-from time import sleep
-from msfabricpysdkcore.coreapi import FabricClientCore
-
-load_dotenv()
-
-class TestFabricClientCore(unittest.TestCase):
-
-    def __init__(self, *args, **kwargs):
-        super(TestFabricClientCore, self).__init__(*args, **kwargs)
-        #load_dotenv()
-        self.fc = FabricClientCore()
-        self.workspace_id = "c3352d34-0b54-40f0-b204-cc964b1beb8d"
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        self.item_name = "testitem" + datetime_str
-        self.item_type = "Notebook"
-
-    def test_item_end_to_end(self):
-
-        item = self.fc.create_item(display_name=self.item_name, type=self.item_type, workspace_id=self.workspace_id)
-        self.assertEqual(item.display_name, self.item_name)
-        self.assertEqual(item.type, self.item_type)
-        self.assertEqual(item.workspace_id, self.workspace_id)
-        self.assertEqual(item.description, "")
-
-        item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
-        item_ = self.fc.get_item(workspace_id=self.workspace_id,
-                                 item_name=self.item_name, item_type=self.item_type)
-        self.assertEqual(item.id, item_.id)
-        self.assertEqual(item.display_name, self.item_name)
-        self.assertEqual(item.type, self.item_type)
-        self.assertEqual(item.workspace_id, self.workspace_id)
-        self.assertEqual(item.description, "")
-
-        item_list = self.fc.list_items(workspace_id=self.workspace_id)
-        self.assertTrue(len(item_list) > 0)
-
-        item_ids = [item_.id for item_ in item_list]
-        self.assertIn(item.id, item_ids)
-
-        self.fc.update_item(workspace_id=self.workspace_id, item_id=item.id, display_name=f"u{self.item_name}")
-        item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
-        self.assertEqual(item.display_name, f"u{self.item_name}")
-
-        status_code = self.fc.delete_item(workspace_id=self.workspace_id, item_id=item.id)
-
-        self.assertAlmostEqual(status_code, 200)
-
-    def test_item_definition(self):
-
-        sjd = self.fc.get_item(workspace_id=self.workspace_id, item_name="blubb", item_type="SparkJobDefinition")
-        self.assertIsNotNone(sjd.definition)
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        blubb2 = "blubb2" + datetime_str
-        blubb3 = "blubb3" + datetime_str
-        blubb2 = self.fc.create_item(display_name=blubb2, type="SparkJobDefinition", workspace_id=self.workspace_id,
-                                     definition=sjd.definition)
-
-        blubb3 = self.fc.create_item(display_name=blubb3, type="SparkJobDefinition", workspace_id=self.workspace_id)
-
-        blubb3 = self.fc.update_item_definition(workspace_id=self.workspace_id,
-                                                item_id=blubb3.id, definition=sjd.definition)
-
-        self.assertEqual(blubb3.definition, sjd.definition)
-
-        self.assertNotEqual(blubb2.id, sjd.id)
-        self.assertEqual(blubb2.definition, sjd.definition)
-        self.assertNotEqual(blubb2.id, blubb3.id)
-
-        blubb2.delete()
-        blubb3.delete()
-
-
-    def test_list_other_items(self):
-
-        fc = self.fc
-
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
-        list_dashboards = fc.list_dashboards(workspace_id)
-        dashboard_names = [dashboard.display_name for dashboard in list_dashboards]
-        self.assertGreater(len(list_dashboards), 0)
-        self.assertIn("dashboard1", dashboard_names)
-
-        list_datamarts = fc.list_datamarts(workspace_id)
-        datamart_names = [datamart.display_name for datamart in list_datamarts]
-        self.assertGreater(len(list_datamarts), 0)
-        self.assertIn("datamart1", datamart_names)
-
-        list_sql_endpoints = fc.list_sql_endpoints(workspace_id)
-        sqlendpoint_names = [sqlendpoint.display_name for sqlendpoint in list_sql_endpoints]
-        self.assertGreater(len(list_sql_endpoints), 0)
-        self.assertIn("sqlendpointlakehouse", sqlendpoint_names)
-
-        # list_mirrored_warehouses = fc.list_mirrored_warehouses(self.workspace_id)
-        # self.assertGreater(len(list_mirrored_warehouses), 0)
-
-        # list_paginated_reports = fc.list_paginated_reports(self.workspace_id)
-        # self.assertGreater(len(list_paginated_reports), 0)
-
-    def test_lakehouse(self):
-
-        lakehouse = self.fc.get_item(workspace_id=self.workspace_id, item_name="lakehouse1", item_type="Lakehouse")
-        self.assertIsNotNone(lakehouse.properties)
-        lakehouse_id = lakehouse.id
-        workspace_id = self.workspace_id
-        date_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        table_name = f"table{date_str}"
-
-
-        status_code = self.fc.load_table(workspace_id=self.workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
-                                         path_type="File", relative_path="Files/folder1/titanic.csv")
-
-        self.assertEqual(status_code, 202)
-
-        # Run on demand table maintenance
-        table_name_maintenance = "table20240515114529"
-
-        execution_data = {
-            "tableName": table_name_maintenance,
-            "optimizeSettings": {
-                "vOrder": True,
-                "zOrderBy": [
-                    "tipAmount"
-                ]
-            },
-            "vacuumSettings": {
-                "retentionPeriod": "7:01:00:00"
-            }
-        }
-
-        response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
-                                                           execution_data = execution_data,
-                                                           job_type = "TableMaintenance", wait_for_completion = True)
-        self.assertIn(response.status_code, [200, 202])
-
-        table_list = self.fc.list_tables(workspace_id=self.workspace_id, lakehouse_id=lakehouse_id)
-        table_names = [table["name"] for table in table_list]
-
-        self.assertIn(table_name, table_names)
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
-        lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name="lakehouse2")
-        self.assertIsNotNone(lakehouse.id)
-
-        lakehouses = fc.list_lakehouses(workspace_id)
-        lakehouse_names = [lh.display_name for lh in lakehouses]
-        self.assertGreater(len(lakehouse_names), 0)
-        self.assertIn("lakehouse2", lakehouse_names)
-
-        lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
-        self.assertEqual(lakehouse.id, lakehouse2.id)
-
-        sleep(20)
-        lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name="lakehouse3")
-        self.assertEqual(lakehouse2.display_name, "lakehouse3")
-
-        id = lakehouse2.id
-
-        lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_name="lakehouse3")
-        self.assertEqual(lakehouse2.id, id)
-
-        status_code = fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
-        self.assertEqual(status_code, 200)
-
-
-    def test_eventhouses(self):
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        eventhouse_name = "evh" + datetime_str
-        eventhouse1 = fc.create_eventhouse(workspace_id, display_name=eventhouse_name)
-        self.assertEqual(eventhouse1.display_name, eventhouse_name)
-
-        eventhouses = fc.list_eventhouses(workspace_id)
-        eventhouse_names = [eh.display_name for eh in eventhouses]
-        self.assertGreater(len(eventhouses), 0)
-        self.assertIn(eventhouse_name, eventhouse_names)
-
-        eh = fc.get_eventhouse(workspace_id, eventhouse_name=eventhouse_name)
-        self.assertIsNotNone(eh.id)
-        self.assertEqual(eh.display_name, eventhouse_name)
-        new_display_name = eventhouse_name + "2"
-        eh2 = fc.update_eventhouse(workspace_id, eh.id, display_name=new_display_name)
-
-        eh = fc.get_eventhouse(workspace_id, eventhouse_id=eh.id)
-        self.assertEqual(eh.display_name, new_display_name)
-        self.assertEqual(eh.id, eh2.id)
-
-        status_code = fc.delete_eventhouse(workspace_id, eh.id)
-        self.assertEqual(status_code, 200)
-
-
-    def test_kql_querysets(self):
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
-        kql_queryset_name = "kqlqueryset1"
-
-        kql_querysets = fc.list_kql_querysets(workspace_id)
-        kql_queryset_names = [kqlq.display_name for kqlq in kql_querysets]
-        self.assertGreater(len(kql_querysets), 0)
-        self.assertIn(kql_queryset_name, kql_queryset_names)
-
-        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_name)
-        self.assertIsNotNone(kqlq.id)
-        self.assertEqual(kqlq.display_name, kql_queryset_name)
-
-        kqlq2 = fc.update_kql_queryset(workspace_id, kqlq.id, display_name=f"{kql_queryset_name}2")
-
-        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
-        self.assertEqual(kqlq.display_name, f"{kql_queryset_name}2")
-        self.assertEqual(kqlq.id, kqlq2.id)
-
-        kqlq2 = fc.update_kql_queryset(workspace_id, kqlq.id, display_name=kql_queryset_name)
-
-        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
-        self.assertEqual(kqlq.display_name, kql_queryset_name)
-        self.assertEqual(kqlq.id, kqlq2.id)
-
-        # status_code = fc.delete_kql_queryset(workspace_id, kqlq.id)
-        # self.assertEqual(status_code, 200)
-
-
-    def test_ml_experiments(self):
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
-        ml_experiment = fc.create_ml_experiment(workspace_id, display_name="mlexperiment1")
-        self.assertEqual(ml_experiment.display_name, "mlexperiment1")
-
-        ml_experiments = fc.list_ml_experiments(workspace_id)
-        ml_experiment_names = [mle.display_name for mle in ml_experiments]
-        self.assertGreater(len(ml_experiments), 0)
-        self.assertIn("mlexperiment1", ml_experiment_names)
-
-        mle = fc.get_ml_experiment(workspace_id, ml_experiment_name="mlexperiment1")
-        self.assertIsNotNone(mle.id)
-        self.assertEqual(mle.display_name, "mlexperiment1")
-
-        mle2 = fc.update_ml_experiment(workspace_id, mle.id, display_name="mlexperiment2")
-
-        mle = fc.get_ml_experiment(workspace_id, ml_experiment_id=mle.id)
-        self.assertEqual(mle.display_name, "mlexperiment2")
-        self.assertEqual(mle.id, mle2.id)
-
-        status_code = fc.delete_ml_experiment(workspace_id, mle.id)
-        self.assertEqual(status_code, 200)
-
-    def test_ml_models(self):
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        model_name = "mlm" + datetime_str
-
-        ml_model = fc.create_ml_model(workspace_id, display_name=model_name)
-        self.assertEqual(ml_model.display_name, model_name)
-
-        ml_models = fc.list_ml_models(workspace_id)
-        ml_model_names = [ml.display_name for ml in ml_models]
-        self.assertGreater(len(ml_models), 0)
-        self.assertIn(model_name, ml_model_names)
-
-        mlm = fc.get_ml_model(workspace_id, ml_model_name=model_name)
-        self.assertIsNotNone(mlm.id)
-        self.assertEqual(mlm.display_name, model_name)
-
-        mlm2 = fc.update_ml_model(workspace_id=workspace_id,ml_model_id= mlm.id, description=model_name)
-
-        mlm = fc.get_ml_model(workspace_id, ml_model_id=mlm.id)
-        self.assertEqual(mlm.description, model_name)
-        self.assertEqual(mlm.id, mlm2.id)
-
-        status_code = fc.delete_ml_model(workspace_id, mlm.id)
-        self.assertEqual(status_code, 200)
-
-    def test_notebooks(self):
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
-        notebook_name = "notebook125"
-
-        notebook_w_content = fc.get_notebook(workspace_id, notebook_name="HelloWorld")
-
-        definition = fc.get_notebook_definition(workspace_id, notebook_w_content.id)
-
-        self.assertIsNotNone(definition)
-        self.assertIn("definition", definition)
-        definition = definition["definition"]
-        notebook = fc.create_notebook(workspace_id, definition=definition, display_name=notebook_name)
-        fc.update_notebook_definition(workspace_id, notebook.id, definition=definition)
-        notebook = fc.get_notebook(workspace_id, notebook_id=notebook.id)
-        self.assertEqual(notebook.display_name, notebook_name)
-        self.assertIsNotNone(notebook.definition)
-
-        notebooks = fc.list_notebooks(workspace_id)
-        notebook_names = [nb.display_name for nb in notebooks]
-        self.assertGreater(len(notebooks), 0)
-        self.assertIn(notebook_name, notebook_names)
-
-        nb = fc.get_notebook(workspace_id, notebook_name=notebook_name)
-        self.assertIsNotNone(nb.id)
-        self.assertEqual(nb.display_name, notebook_name)
-
-        nb2 = fc.update_notebook(workspace_id, notebook_id=nb.id, display_name=f"{notebook_name}2")
-
-        nb = fc.get_notebook(workspace_id, notebook_id=nb.id)
-        self.assertEqual(nb.display_name, f"{notebook_name}2")
-        self.assertEqual(nb.id, nb2.id)
-
-        status_code = fc.delete_notebook(workspace_id, nb.id)
-        self.assertEqual(status_code, 200)
-
-    def test_reports(self):
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
-        report_name = "report1234"
-
-        report_w_content = fc.get_report(workspace_id, report_name="HelloWorldReport")
-
-        definition = fc.get_report_definition(workspace_id, report_w_content.id)
-
-        self.assertIsNotNone(definition)
-        self.assertIn("definition", definition)
-        definition = definition["definition"]
-
-        report = fc.create_report(workspace_id, display_name=report_name, definition=definition)
-        fc.update_report_definition(workspace_id, report.id, definition=definition)
-        report = fc.get_report(workspace_id, report_id=report.id)
-        self.assertEqual(report.display_name, report_name)
-        self.assertIsNotNone(report.definition)
-
-        reports = fc.list_reports(workspace_id)
-        report_names = [r.display_name for r in reports]
-        self.assertGreater(len(reports), 0)
-        self.assertIn(report_name, report_names)
-
-        r = fc.get_report(workspace_id, report_name=report_name)
-        self.assertIsNotNone(r.id)
-        self.assertEqual(r.display_name, report_name)
-
-        status_code = fc.delete_report(workspace_id, r.id)
-        self.assertEqual(status_code, 200)
-
-    def test_semantic_models(self):
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
-        semantic_model_name = "semanticmodel1234"
-
-        semantic_model_w_content = fc.get_semantic_model(workspace_id, semantic_model_name="Table")
-
-        definition = fc.get_semantic_model_definition(workspace_id, semantic_model_w_content.id)
-
-        self.assertIsNotNone(definition)
-        self.assertIn("definition", definition)
-        definition = definition["definition"]
-        semantic_model = fc.create_semantic_model(workspace_id, display_name=semantic_model_name, definition=definition)
-        fc.update_semantic_model_definition(workspace_id, semantic_model.id, definition=definition)
-        semantic_model = fc.get_semantic_model(workspace_id, semantic_model_id=semantic_model.id)
-        self.assertEqual(semantic_model.display_name, semantic_model_name)
-        self.assertIsNotNone(semantic_model.definition)
-
-        semantic_models = fc.list_semantic_models(workspace_id)
-        semantic_model_names = [sm.display_name for sm in semantic_models]
-        self.assertGreater(len(semantic_models), 0)
-        self.assertIn(semantic_model_name, semantic_model_names)
-
-        sm = fc.get_semantic_model(workspace_id, semantic_model_name=semantic_model_name)
-        self.assertIsNotNone(sm.id)
-        self.assertEqual(sm.display_name, semantic_model_name)
-
-        status_code = fc.delete_semantic_model(workspace_id, sm.id)
-        self.assertEqual(status_code, 200)
-
-    def test_warehouses(self):
-
-        fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        warehouse1 = f"wh{datetime_str}"
-        warehouse = fc.create_warehouse(workspace_id, display_name=warehouse1)
-        self.assertIsNotNone(warehouse.id)
-
-        warehouses = fc.list_warehouses(workspace_id)
-        warehouse_names = [wh.display_name for wh in warehouses]
-        self.assertGreater(len(warehouses), 0)
-        self.assertIn(warehouse1, warehouse_names)
-
-        warehouse = fc.get_warehouse(workspace_id, warehouse_name=warehouse1)
-        self.assertIsNotNone(warehouse.id)
-        self.assertEqual(warehouse.display_name, warehouse1)
-
-        warehouse2 = fc.update_warehouse(workspace_id, warehouse.id, display_name=f"{warehouse1}2")
-        warehouse = fc.get_warehouse(workspace_id, warehouse_id=warehouse.id)
-        self.assertEqual(warehouse.display_name, f"{warehouse1}2")
-        self.assertEqual(warehouse.id, warehouse2.id)
-
-        status_code = fc.delete_warehouse(workspace_id, warehouse.id)
-        self.assertEqual(status_code, 200)
-
-
-if __name__ == "__main__":
-    unittest.main()
msfabricpysdkcore/tests/test_jobs.py (removed in 0.1.5)
@@ -1,43 +0,0 @@
-import unittest
-from msfabricpysdkcore.coreapi import FabricClientCore
-from dotenv import load_dotenv
-
-load_dotenv()
-
-
-class TestFabricClientCore(unittest.TestCase):
-
-    def __init__(self, *args, **kwargs):
-        super(TestFabricClientCore, self).__init__(*args, **kwargs)
-        #load_dotenv()
-        self.fc = FabricClientCore()
-        self.workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-        self.item_id = "38a1c15f-8a9e-49c5-8d05-a27cf9ce8b18"
-
-
-    def test_jobs_end_to_end(self):
-        job = self.fc.run_on_demand_item_job(workspace_id=self.workspace_id,
-                                             item_id=self.item_id,
-                                             job_type="RunNotebook")
-
-        self.assertEqual(job.item_id, self.item_id)
-        self.assertEqual(job.workspace_id, self.workspace_id)
-        self.assertEqual(job.job_type, "RunNotebook")
-        self.assertIn(job.status, ["NotStarted", "InProgress"])
-        self.assertEqual(job.invoke_type, "Manual")
-
-        job2 = self.fc.get_item_job_instance(workspace_id=self.workspace_id,
-                                             item_id=self.item_id,
-                                             job_instance_id=job.id)
-
-        self.assertEqual(job.id, job2.id)
-
-        status_code = self.fc.cancel_item_job_instance(workspace_id=self.workspace_id,
-                                                       item_id=self.item_id,
-                                                       job_instance_id=job.id)
-
-        self.assertEqual(status_code, 202)
-
-if __name__ == "__main__":
-    unittest.main()
-
msfabricpysdkcore/tests/test_kql_queryset.py (removed in 0.1.5)
@@ -1,49 +0,0 @@
-import unittest
-from datetime import datetime
-from dotenv import load_dotenv
-from time import sleep
-from msfabricpysdkcore.coreapi import FabricClientCore
-
-load_dotenv()
-
-class TestFabricClientCore(unittest.TestCase):
-
-    def __init__(self, *args, **kwargs):
-        super(TestFabricClientCore, self).__init__(*args, **kwargs)
-        #load_dotenv()
-        self.fc = FabricClientCore()
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        self.item_name = "testitem" + datetime_str
-        self.item_type = "Notebook"
-
-    def test_kql_querysets(self):
-
-        fc = self.fc
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-
-        kql_queryset_name = "kqlqueryset1"
-
-        kql_querysets = fc.list_kql_querysets(workspace_id)
-        kql_queryset_names = [kqlq.display_name for kqlq in kql_querysets]
-        self.assertGreater(len(kql_querysets), 0)
-        self.assertIn(kql_queryset_name, kql_queryset_names)
-
-        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_name)
-        self.assertIsNotNone(kqlq.id)
-        self.assertEqual(kqlq.display_name, kql_queryset_name)
-
-        kqlq2 = fc.update_kql_queryset(workspace_id, kqlq.id, display_name=f"{kql_queryset_name}2", return_item=True)
-
-        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
-        self.assertEqual(kqlq.display_name, f"{kql_queryset_name}2")
-        self.assertEqual(kqlq.id, kqlq2.id)
-
-        kqlq2 = fc.update_kql_queryset(workspace_id, kqlq.id, display_name=kql_queryset_name, return_item=True)
-
-        kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
-        self.assertEqual(kqlq.display_name, kql_queryset_name)
-        self.assertEqual(kqlq.id, kqlq2.id)
-
-        # status_code = fc.delete_kql_queryset(workspace_id, kqlq.id)
-        # self.assertEqual(status_code, 200)
msfabricpysdkcore/tests/test_kqldatabases.py (removed in 0.1.5)
@@ -1,48 +0,0 @@
-import unittest
-from datetime import datetime
-from dotenv import load_dotenv
-from time import sleep
-from msfabricpysdkcore.coreapi import FabricClientCore
-
-load_dotenv()
-
-class TestFabricClientCore(unittest.TestCase):
-
-    def __init__(self, *args, **kwargs):
-        super(TestFabricClientCore, self).__init__(*args, **kwargs)
-        #load_dotenv()
-        self.fc = FabricClientCore()
-
-    def test_kql_database(self):
-
-        fc = self.fc
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-        evenhouse_id = "f30ba76a-92c3-40d3-ad69-36db059c113d"
-
-        creation_payload = {"databaseType" : "ReadWrite",
-                            "parentEventhouseItemId" : evenhouse_id}
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        kqldb_name = "kql" + datetime_str
-        kqldb = fc.create_kql_database(workspace_id = workspace_id, display_name=kqldb_name,
-                                       creation_payload=creation_payload)
-        self.assertEqual(kqldb.display_name, kqldb_name)
-
-        kql_databases = fc.list_kql_databases(workspace_id)
-        kql_database_names = [kqldb.display_name for kqldb in kql_databases]
-        self.assertGreater(len(kql_databases), 0)
-        self.assertIn(kqldb_name, kql_database_names)
-
-        kqldb = fc.get_kql_database(workspace_id, kql_database_name=kqldb_name)
-        self.assertIsNotNone(kqldb.id)
-        self.assertEqual(kqldb.display_name, kqldb_name)
-
-        new_name = kqldb_name+"2"
-        kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name=new_name, return_item=True)
-
-        kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
-        self.assertEqual(kqldb.display_name, new_name)
-        self.assertEqual(kqldb.id, kqldb2.id)
-
-        status_code = fc.delete_kql_database(workspace_id, kqldb.id)
-        self.assertEqual(status_code, 200)
msfabricpysdkcore/tests/test_lakehouse.py (removed in 0.1.5)
@@ -1,84 +0,0 @@
-import unittest
-from datetime import datetime
-from dotenv import load_dotenv
-from time import sleep
-from msfabricpysdkcore.coreapi import FabricClientCore
-
-load_dotenv()
-
-class TestFabricClientCore(unittest.TestCase):
-
-    def __init__(self, *args, **kwargs):
-        super(TestFabricClientCore, self).__init__(*args, **kwargs)
-        #load_dotenv()
-        self.fc = FabricClientCore()
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        self.item_name = "testitem" + datetime_str
-        self.item_type = "Notebook"
-
-    def test_lakehouse(self):
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-
-        lakehouse = self.fc.get_item(workspace_id=workspace_id, item_name="lakelhousewlabels", item_type="Lakehouse")
-        self.assertIsNotNone(lakehouse.properties)
-        lakehouse_id = lakehouse.id
-        date_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        table_name = f"table{date_str}"
-
-
-        status_code = self.fc.load_table(workspace_id=workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
-                                         path_type="File", relative_path="Files/to_share/titanic2.csv")
-
-        self.assertEqual(status_code, 202)
-
-        # Run on demand table maintenance
-        table_name_maintenance = "table20240515114529"
-
-        execution_data = {
-            "tableName": table_name_maintenance,
-            "optimizeSettings": {
-                "vOrder": True,
-                "zOrderBy": [
-                    "tipAmount"
-                ]
-            },
-            "vacuumSettings": {
-                "retentionPeriod": "7:01:00:00"
-            }
-        }
-
-        response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
-                                                           execution_data = execution_data,
-                                                           job_type = "TableMaintenance", wait_for_completion = False)
-        self.assertIn(response.status_code, [200, 202])
-
-        table_list = self.fc.list_tables(workspace_id=workspace_id, lakehouse_id=lakehouse_id)
-        table_names = [table["name"] for table in table_list]
-
-        self.assertIn(table_name, table_names)
-
-        fc = self.fc
-
-        lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name="lakehouse2")
-        self.assertIsNotNone(lakehouse.id)
-
-        lakehouses = fc.list_lakehouses(workspace_id)
-        lakehouse_names = [lh.display_name for lh in lakehouses]
-        self.assertGreater(len(lakehouse_names), 0)
-        self.assertIn("lakehouse2", lakehouse_names)
-
-        lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
-        self.assertEqual(lakehouse.id, lakehouse2.id)
-
-        sleep(20)
-        lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name="lakehouse3", return_item=True)
-        self.assertEqual(lakehouse2.display_name, "lakehouse3")
-
-        id = lakehouse2.id
-
-        lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_name="lakehouse3")
-        self.assertEqual(lakehouse2.id, id)
-
-        status_code = fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
-        self.assertEqual(status_code, 200)
msfabricpysdkcore/tests/test_ml_experiments.py (removed in 0.1.5)
@@ -1,47 +0,0 @@
-import unittest
-from datetime import datetime
-from dotenv import load_dotenv
-from time import sleep
-from msfabricpysdkcore.coreapi import FabricClientCore
-
-load_dotenv()
-
-class TestFabricClientCore(unittest.TestCase):
-
-    def __init__(self, *args, **kwargs):
-        super(TestFabricClientCore, self).__init__(*args, **kwargs)
-        #load_dotenv()
-        self.fc = FabricClientCore()
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        self.item_name = "testitem" + datetime_str
-        self.item_type = "Notebook"
-
-
-    def test_ml_experiments(self):
-
-        fc = self.fc
-        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
-        mlexperiment_name = "mlexp" + datetime.now().strftime("%Y%m%d%H%M%S")
-        mlexperiment_name2 = "mlexp2" + datetime.now().strftime("%Y%m%d%H%M%S")
-
-        ml_experiment = fc.create_ml_experiment(workspace_id, display_name=mlexperiment_name)
-        self.assertEqual(ml_experiment.display_name, mlexperiment_name)
-
-        ml_experiments = fc.list_ml_experiments(workspace_id)
-        ml_experiment_names = [mle.display_name for mle in ml_experiments]
-        self.assertGreater(len(ml_experiments), 0)
-        self.assertIn(mlexperiment_name, ml_experiment_names)
-
-        mle = fc.get_ml_experiment(workspace_id, ml_experiment_name=mlexperiment_name)
-        self.assertIsNotNone(mle.id)
-        self.assertEqual(mle.display_name, mlexperiment_name)
-
-        mle2 = fc.update_ml_experiment(workspace_id, mle.id, display_name=mlexperiment_name2, return_item=True)
-
-        mle = fc.get_ml_experiment(workspace_id, ml_experiment_id=mle.id)
-        self.assertEqual(mle.display_name, mlexperiment_name2)
-        self.assertEqual(mle.id, mle2.id)
-
-        status_code = fc.delete_ml_experiment(workspace_id, mle.id)
-        self.assertEqual(status_code, 200)