msfabricpysdkcore 0.0.12__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. msfabricpysdkcore/admin_item.py +18 -44
  2. msfabricpysdkcore/admin_workspace.py +13 -60
  3. msfabricpysdkcore/adminapi.py +400 -429
  4. msfabricpysdkcore/client.py +115 -1
  5. msfabricpysdkcore/coreapi.py +2553 -763
  6. msfabricpysdkcore/deployment_pipeline.py +34 -146
  7. msfabricpysdkcore/domain.py +20 -219
  8. msfabricpysdkcore/environment.py +13 -171
  9. msfabricpysdkcore/item.py +63 -198
  10. msfabricpysdkcore/job_instance.py +8 -22
  11. msfabricpysdkcore/lakehouse.py +9 -118
  12. msfabricpysdkcore/long_running_operation.py +7 -37
  13. msfabricpysdkcore/onelakeshortcut.py +7 -21
  14. msfabricpysdkcore/otheritems.py +66 -91
  15. msfabricpysdkcore/spark_custom_pool.py +7 -47
  16. msfabricpysdkcore/tests/test_admin_apis.py +21 -1
  17. msfabricpysdkcore/tests/test_datapipelines.py +14 -17
  18. msfabricpysdkcore/tests/test_deployment_pipeline.py +3 -3
  19. msfabricpysdkcore/tests/test_domains.py +4 -3
  20. msfabricpysdkcore/tests/test_environments.py +51 -2
  21. msfabricpysdkcore/tests/test_evenhouses.py +48 -0
  22. msfabricpysdkcore/tests/test_evenstreams.py +1 -1
  23. msfabricpysdkcore/tests/test_external_data_shares.py +51 -0
  24. msfabricpysdkcore/tests/test_items.py +80 -0
  25. msfabricpysdkcore/tests/test_kql_queryset.py +50 -0
  26. msfabricpysdkcore/tests/test_kqldatabases.py +1 -1
  27. msfabricpysdkcore/tests/test_lakehouse.py +86 -0
  28. msfabricpysdkcore/tests/test_ml_experiments.py +48 -0
  29. msfabricpysdkcore/tests/test_ml_models.py +48 -0
  30. msfabricpysdkcore/tests/test_notebooks.py +58 -0
  31. msfabricpysdkcore/tests/test_one_lake_data_access_security.py +65 -0
  32. msfabricpysdkcore/tests/test_other_items.py +46 -0
  33. msfabricpysdkcore/tests/test_reports.py +53 -0
  34. msfabricpysdkcore/tests/test_semantic_model.py +51 -0
  35. msfabricpysdkcore/tests/test_spark.py +7 -5
  36. msfabricpysdkcore/tests/test_sparkjobdefinition.py +1 -1
  37. msfabricpysdkcore/tests/test_warehouses.py +51 -0
  38. msfabricpysdkcore/tests/test_workspaces_capacities.py +7 -4
  39. msfabricpysdkcore/workspace.py +397 -1121
  40. {msfabricpysdkcore-0.0.12.dist-info → msfabricpysdkcore-0.1.1.dist-info}/METADATA +110 -8
  41. msfabricpysdkcore-0.1.1.dist-info/RECORD +52 -0
  42. {msfabricpysdkcore-0.0.12.dist-info → msfabricpysdkcore-0.1.1.dist-info}/WHEEL +1 -1
  43. msfabricpysdkcore-0.0.12.dist-info/RECORD +0 -39
  44. {msfabricpysdkcore-0.0.12.dist-info → msfabricpysdkcore-0.1.1.dist-info}/LICENSE +0 -0
  45. {msfabricpysdkcore-0.0.12.dist-info → msfabricpysdkcore-0.1.1.dist-info}/top_level.txt +0 -0
@@ -1,13 +1,11 @@
1
1
  import json
2
- from time import sleep
3
-
4
- import requests
2
+ from msfabricpysdkcore.coreapi import FabricClientCore
5
3
 
6
4
 
7
5
  class SparkCustomPool:
8
6
  """Class to represent a custom pool in Microsoft Fabric"""
9
7
 
10
- def __init__(self, id, name, type, node_family, node_size, auto_scale, dynamic_executor_allocation, workspace_id, auth) -> None:
8
+ def __init__(self, id, name, type, node_family, node_size, auto_scale, dynamic_executor_allocation, workspace_id, core_client: FabricClientCore) -> None:
11
9
 
12
10
  self.id = id
13
11
  self.name = name
@@ -18,7 +16,7 @@ class SparkCustomPool:
18
16
  self.dynamic_executor_allocation = dynamic_executor_allocation
19
17
  self.workspace_id = workspace_id
20
18
 
21
- self.auth = auth
19
+ self.core_client = core_client
22
20
 
23
21
  def __str__(self) -> str:
24
22
  """Return a string representation of the workspace object"""
@@ -37,7 +35,7 @@ class SparkCustomPool:
37
35
  def __repr__(self) -> str:
38
36
  return self.__str__()
39
37
 
40
- def from_dict(item_dict, auth):
38
+ def from_dict(item_dict, core_client):
41
39
  """Create Item object from dictionary"""
42
40
 
43
41
  if 'autoScale' not in item_dict:
@@ -54,55 +52,17 @@ class SparkCustomPool:
54
52
 
55
53
  return SparkCustomPool(id=item_dict['id'], name=item_dict['name'], type=item_dict['type'], node_family=item_dict['nodeFamily'],
56
54
  node_size=item_dict['nodeSize'], auto_scale=item_dict['autoScale'], dynamic_executor_allocation=item_dict['dynamicExecutorAllocation'],
57
- workspace_id=item_dict['workspaceId'], auth=auth)
55
+ workspace_id=item_dict['workspaceId'], core_client=core_client)
58
56
 
59
57
 
60
58
  def delete(self):
61
59
  """Delete the custom pool item"""
62
- # DELETE http://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/spark/pools/{poolId}
63
-
64
- url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/spark/pools/{self.id}"
65
- for _ in range(10):
66
- response = requests.delete(url=url, headers=self.auth.get_headers())
67
- if response.status_code == 429:
68
- print("Too many requests, waiting 10 seconds")
69
- sleep(10)
70
- continue
71
- if response.status_code not in (200, 429):
72
- raise Exception(f"Error deleting spark pool: {response.status_code}, {response.text}")
73
- break
74
-
75
- return response.status_code
60
+ return self.core_client.delete_workspace_custom_pool(self.workspace_id, self.id)
76
61
 
77
62
 
78
63
  def update(self, name, node_family, node_size, auto_scale, dynamic_executor_allocation):
79
64
  """Update the custom pool item"""
80
- url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/spark/pools/{self.id}"
81
- body = {}
82
-
83
- if name is not None:
84
- body['name'] = name
85
- if node_family is not None:
86
- body['nodeFamily'] = node_family
87
- if node_size is not None:
88
- body['nodeSize'] = node_size
89
- if auto_scale is not None:
90
- body['autoScale'] = auto_scale
91
- if dynamic_executor_allocation is not None:
92
- body['dynamicExecutorAllocation'] = dynamic_executor_allocation
93
-
94
- if not body:
95
- return self
96
- for _ in range(10):
97
- response = requests.patch(url=url, headers=self.auth.get_headers(), json=body)
98
- if response.status_code == 429:
99
- print("Too many requests, waiting 10 seconds")
100
- sleep(10)
101
- continue
102
- if response.status_code not in (200, 429):
103
- raise Exception(f"Error updating item: {response.status_code}, {response.text}")
104
- break
105
-
65
+ _ = self.core_client.update_workspace_custom_pool(self.workspace_id, self.id, name, node_family, node_size, auto_scale, dynamic_executor_allocation)
106
66
  if name is not None:
107
67
  self.name = name
108
68
  if node_family is not None:
@@ -25,7 +25,7 @@ class TestFabricClientCore(unittest.TestCase):
25
25
 
26
26
  self.assertEqual(ws.id, ws_clone.id)
27
27
 
28
- # Get workspace access details
28
+ # List workspace access details
29
29
 
30
30
  ws_access = fca.list_workspace_access_details(ws.id)
31
31
  principials = ws_access["accessDetails"]
@@ -77,3 +77,23 @@ class TestFabricClientCore(unittest.TestCase):
77
77
  self.assertEqual(resp["itemsChangeLabelStatus"][0]["status"], "Succeeded")
78
78
  resp = fca.bulk_remove_labels(items=items)
79
79
  self.assertEqual(resp["itemsChangeLabelStatus"][0]["status"], "Succeeded")
80
+
81
+ def test_admin_external_data_shares(self):
82
+
83
+ fca = self.fca
84
+
85
+ data_shares = fca.list_external_data_shares()
86
+ ws = fca.list_workspaces(name="testworkspace")[0]
87
+
88
+ data_shares = [d for d in data_shares if d['workspaceId'] == ws.id]
89
+
90
+ self.assertGreater(len(data_shares), 0)
91
+ fca.revoke_external_data_share(external_data_share_id = data_shares[0]['id'],
92
+ item_id = data_shares[0]['itemId'],
93
+ workspace_id = data_shares[0]['workspaceId'])
94
+ data_shares = fca.list_external_data_shares()
95
+ ws = fca.list_workspaces(name="testworkspace")[0]
96
+
97
+ data_shares = [d for d in data_shares if d['workspaceId'] == ws.id]
98
+
99
+ self.assertEqual(data_shares[0]['status'], 'Revoked')
@@ -17,32 +17,29 @@ class TestFabricClientCore(unittest.TestCase):
17
17
 
18
18
  fc = self.fc
19
19
  workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
20
-
20
+ datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
21
+ pipeline_name = f"pipeline_{datetime_str}"
22
+
23
+ dp = fc.create_data_pipeline(workspace_id, display_name=pipeline_name, description="asda")
24
+ dp.update_definition(dp.definition)
25
+
21
26
  dps = fc.list_data_pipelines(workspace_id)
22
27
  dp_names = [dp.display_name for dp in dps]
23
28
  self.assertGreater(len(dps), 0)
24
- self.assertIn("pipeline1", dp_names)
25
-
26
- dp = fc.get_data_pipeline(workspace_id, data_pipeline_name="pipeline1")
27
- self.assertIsNotNone(dp.id)
28
- self.assertIsNotNone(dp.definition)
29
- self.assertEqual(dp.display_name, "pipeline1")
30
-
31
- dp_new = fc.create_data_pipeline(workspace_id, display_name="pipeline_new", description="asda")
32
- dp_new.update_definition(dp.definition)
33
-
34
- self.assertEqual(dp_new.display_name, "pipeline_new")
29
+ self.assertIn(pipeline_name, dp_names)
35
30
 
36
- dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2")
31
+ self.assertEqual(dp.display_name, pipeline_name)
32
+ pipeline_name2 = f"pipeline_{datetime_str}_2"
33
+ dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name=pipeline_name2, return_item=True)
37
34
 
38
35
  dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
39
- self.assertEqual(dp.display_name, "pipeline2")
36
+ self.assertEqual(dp.display_name, pipeline_name2)
40
37
  self.assertEqual(dp.id, dp2.id)
41
38
 
42
- dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline1")
39
+ dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name=pipeline_name, return_item=True)
43
40
 
44
41
  dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
45
- self.assertEqual(dp.display_name, "pipeline1")
42
+ self.assertEqual(dp.display_name, pipeline_name)
46
43
  self.assertEqual(dp.id, dp2.id)
47
- status_code = fc.delete_data_pipeline(workspace_id, dp_new.id)
44
+ status_code = fc.delete_data_pipeline(workspace_id, dp.id)
48
45
  self.assertEqual(status_code, 200)
@@ -32,7 +32,7 @@ class TestFabricClientCore(unittest.TestCase):
32
32
  self.assertEqual(pipe.display_name, 'sdkpipe')
33
33
  self.assertEqual(pipe.id, pipe_id)
34
34
 
35
- stages = fc.get_deployment_pipeline_stages(pipe_id)
35
+ stages = fc.list_deployment_pipeline_stages(pipe_id)
36
36
 
37
37
  self.assertGreater(len(stages), 0)
38
38
  names = [stage.display_name for stage in stages]
@@ -42,12 +42,12 @@ class TestFabricClientCore(unittest.TestCase):
42
42
  dev_stage = [stage for stage in stages if stage.display_name == "Development"][0]
43
43
  prod_stage = [stage for stage in stages if stage.display_name == "Production"][0]
44
44
 
45
- items = fc.get_deployment_pipeline_stages_items(pipeline_id=pipe_id, stage_id=dev_stage.id)
45
+ items = fc.list_deployment_pipeline_stages_items(deployment_pipeline_id=pipe_id, stage_id=dev_stage.id)
46
46
 
47
47
  self.assertGreater(len(items), 0)
48
48
  self.assertIn("cicdlakehouse", [item["itemDisplayName"] for item in items])
49
49
 
50
- items = [item for item in dev_stage.get_items() if item["itemDisplayName"] == 'cicdlakehouse']
50
+ items = [item for item in dev_stage.list_items() if item["itemDisplayName"] == 'cicdlakehouse']
51
51
  item = items[0]
52
52
  item = {"sourceItemId": item["itemId"],
53
53
  "itemType": item["itemType"]}
@@ -1,5 +1,6 @@
1
1
  import unittest
2
2
  from dotenv import load_dotenv
3
+ from datetime import datetime
3
4
  from msfabricpysdkcore import FabricClientCore, FabricClientAdmin
4
5
 
5
6
 
@@ -32,7 +33,7 @@ class TestFabricClientCore(unittest.TestCase):
32
33
  pass
33
34
 
34
35
  # Create domain
35
- domain_name = "sdktestdomains"
36
+ domain_name = "sdktestdomains" + datetime.now().strftime("%Y%m%d%H%M%S")
36
37
  domain = fca.create_domain(display_name=domain_name)
37
38
  self.assertIsNotNone(domain.id)
38
39
  self.assertEqual(domain.display_name, domain_name)
@@ -54,8 +55,8 @@ class TestFabricClientCore(unittest.TestCase):
54
55
  self.assertIn(domain.id, domains_ids)
55
56
 
56
57
  # Update domain
57
- domain_new_name = "sdktestdomains2"
58
- domain_clone = fca.update_domain(domain.id, display_name=domain_new_name)
58
+ domain_new_name = f"{domain_name}2"
59
+ domain_clone = fca.update_domain(domain.id, display_name=domain_new_name, return_item=True)
59
60
  self.assertEqual(domain_clone.display_name, domain_new_name)
60
61
 
61
62
  # Assign domain workspaces by Ids
@@ -30,7 +30,7 @@ class TestFabricClientCore(unittest.TestCase):
30
30
  self.assertIsNotNone(env.id)
31
31
  self.assertEqual(env.display_name, env_name)
32
32
  new_name = env_name + "2"
33
- env2 = fc.update_environment(workspace_id, env.id, display_name=new_name)
33
+ env2 = fc.update_environment(workspace_id, env.id, display_name=new_name, return_item=True)
34
34
 
35
35
  env = fc.get_environment(workspace_id, environment_id=env.id)
36
36
  self.assertEqual(env.display_name, new_name)
@@ -62,4 +62,53 @@ class TestFabricClientCore(unittest.TestCase):
62
62
  updated_settings = fc.get_staging_settings(workspace_id=workspace_id, environment_id=environment_id)
63
63
  self.assertIn("instancePool", updated_settings)
64
64
  self.assertIn("dynamicExecutorAllocation", updated_settings)
65
- self.assertEqual(updated_settings["driverCores"], driver_cores)
65
+ self.assertEqual(updated_settings["driverCores"], driver_cores)
66
+
67
+
68
+ def test_environment_spark_libraries(self):
69
+ fc = self.fc
70
+ workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
71
+ environment_id = 'fae6d1a7-d671-4091-89b1-f42626deb56f'
72
+
73
+ resp = fc.get_published_libraries(workspace_id, environment_id)
74
+ self.assertIn('customLibraries', resp)
75
+ self.assertIn('wheelFiles', resp['customLibraries'])
76
+ self.assertIn('msfabricpysdkcore-0.0.13-py3-none-any.whl', resp['customLibraries']['wheelFiles'])
77
+
78
+ resp = fc.upload_staging_library(workspace_id, environment_id, 'dummy.whl')
79
+ self.assertEqual(resp.status_code, 200)
80
+
81
+ resp = fc.get_staging_libraries(workspace_id, environment_id)
82
+
83
+ self.assertIn('customLibraries', resp)
84
+ self.assertIn('wheelFiles', resp['customLibraries'])
85
+ self.assertIn('dummy.whl', resp['customLibraries']['wheelFiles'])
86
+ self.assertIn('staging.whl', resp['customLibraries']['wheelFiles'])
87
+
88
+
89
+ resp = fc.publish_environment(workspace_id, environment_id)
90
+ self.assertIn('publishDetails', resp)
91
+ self.assertIn('state', resp['publishDetails'])
92
+ self.assertEqual(resp['publishDetails']['state'], 'running')
93
+
94
+
95
+ resp = fc.cancel_publish(workspace_id, environment_id)
96
+ self.assertIn('publishDetails', resp)
97
+ self.assertIn('state', resp['publishDetails'])
98
+ self.assertEqual(resp['publishDetails']['state'], 'cancelled')
99
+
100
+ resp = fc.delete_staging_library(workspace_id, environment_id, 'dummy.whl')
101
+ self.assertEqual(resp.status_code, 200)
102
+
103
+ resp = fc.get_staging_libraries(workspace_id, environment_id)
104
+
105
+ self.assertIn('customLibraries', resp)
106
+ self.assertIn('wheelFiles', resp['customLibraries'])
107
+ self.assertNotIn('dummy.whl', resp['customLibraries']['wheelFiles'])
108
+ self.assertIn('staging.whl', resp['customLibraries']['wheelFiles'])
109
+
110
+
111
+
112
+
113
+
114
+
@@ -0,0 +1,48 @@
1
+ import unittest
2
+ from datetime import datetime
3
+ from dotenv import load_dotenv
4
+ from time import sleep
5
+ from msfabricpysdkcore.coreapi import FabricClientCore
6
+
7
+ load_dotenv()
8
+
9
+ class TestFabricClientCore(unittest.TestCase):
10
+
11
+ def __init__(self, *args, **kwargs):
12
+ super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
+ #load_dotenv()
14
+ self.fc = FabricClientCore()
15
+ self.workspace_id = "c3352d34-0b54-40f0-b204-cc964b1beb8d"
16
+
17
+ datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
18
+ self.item_name = "testitem" + datetime_str
19
+ self.item_type = "Notebook"
20
+
21
+
22
+
23
+ def test_eventhouses(self):
24
+
25
+ fc = self.fc
26
+ workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
27
+ datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
28
+ eventhouse_name = "evh" + datetime_str
29
+ eventhouse1 = fc.create_eventhouse(workspace_id, display_name=eventhouse_name)
30
+ self.assertEqual(eventhouse1.display_name, eventhouse_name)
31
+
32
+ eventhouses = fc.list_eventhouses(workspace_id)
33
+ eventhouse_names = [eh.display_name for eh in eventhouses]
34
+ self.assertGreater(len(eventhouses), 0)
35
+ self.assertIn(eventhouse_name, eventhouse_names)
36
+
37
+ eh = fc.get_eventhouse(workspace_id, eventhouse_name=eventhouse_name)
38
+ self.assertIsNotNone(eh.id)
39
+ self.assertEqual(eh.display_name, eventhouse_name)
40
+ new_display_name = eventhouse_name + "2"
41
+ eh2 = fc.update_eventhouse(workspace_id, eh.id, display_name=new_display_name, return_item=True)
42
+
43
+ eh = fc.get_eventhouse(workspace_id, eventhouse_id=eh.id)
44
+ self.assertEqual(eh.display_name, new_display_name)
45
+ self.assertEqual(eh.id, eh2.id)
46
+
47
+ status_code = fc.delete_eventhouse(workspace_id, eh.id)
48
+ self.assertEqual(status_code, 200)
@@ -34,7 +34,7 @@ class TestFabricClientCore(unittest.TestCase):
34
34
  self.assertIsNotNone(es.id)
35
35
  self.assertEqual(es.display_name, es_name)
36
36
 
37
- es2 = fc.update_eventstream(workspace_id, es.id, display_name=f"{es_name}2")
37
+ es2 = fc.update_eventstream(workspace_id, es.id, display_name=f"{es_name}2", return_item=True)
38
38
 
39
39
  es = fc.get_eventstream(workspace_id, eventstream_id=es.id)
40
40
  self.assertEqual(es.display_name, f"{es_name}2")
@@ -0,0 +1,51 @@
1
+ import unittest
2
+ from dotenv import load_dotenv
3
+ from msfabricpysdkcore.coreapi import FabricClientCore
4
+
5
+ load_dotenv()
6
+
7
+ class TestFabricClientCore(unittest.TestCase):
8
+
9
+ def __init__(self, *args, **kwargs):
10
+ super(TestFabricClientCore, self).__init__(*args, **kwargs)
11
+ self.fc = FabricClientCore()
12
+
13
+ def test_external_data_shares(self):
14
+
15
+ fc = self.fc
16
+
17
+ workspace_id = 'c3352d34-0b54-40f0-b204-cc964b1beb8d'
18
+ item_id = 'e2c09c89-bf97-4f71-bdeb-36338795ec36'
19
+
20
+ recipient = {
21
+ "userPrincipalName": "lisa4@fabrikam.com"
22
+ }
23
+ paths=["Files/external"]
24
+
25
+ resp = fc.create_external_data_share(workspace_id, item_id, paths, recipient)
26
+ self.assertIsNotNone(resp)
27
+ self.assertIn('id', resp)
28
+
29
+
30
+ get = fc.get_external_data_share(workspace_id, item_id, resp['id'])
31
+ self.assertIsNotNone(get)
32
+ self.assertEqual(get['id'], resp['id'])
33
+
34
+
35
+ resp = fc.list_external_data_shares_in_item(workspace_id, item_id)
36
+ self.assertGreater(len(resp), 0)
37
+
38
+ data_share_ids = [ds['id'] for ds in resp]
39
+ self.assertIn(get['id'], data_share_ids)
40
+
41
+
42
+ resp = fc.revoke_external_data_share(workspace_id, item_id, get['id'])
43
+ self.assertEqual(resp, 200)
44
+
45
+ get2 = fc.get_external_data_share(workspace_id, item_id, get['id'])
46
+ self.assertIsNotNone(get2)
47
+
48
+ self.assertEqual(get['id'], get2['id'])
49
+ self.assertEqual(get2['status'], 'Revoked')
50
+
51
+
@@ -0,0 +1,80 @@
1
+ import unittest
2
+ from datetime import datetime
3
+ from dotenv import load_dotenv
4
+ from time import sleep
5
+ from msfabricpysdkcore.coreapi import FabricClientCore
6
+
7
+ load_dotenv()
8
+
9
+ class TestFabricClientCore(unittest.TestCase):
10
+
11
+ def __init__(self, *args, **kwargs):
12
+ super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
+ #load_dotenv()
14
+ self.fc = FabricClientCore()
15
+ self.workspace_id = "c3352d34-0b54-40f0-b204-cc964b1beb8d"
16
+
17
+ datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
18
+ self.item_name = "testitem" + datetime_str
19
+ self.item_type = "Notebook"
20
+
21
+ def test_item_end_to_end(self):
22
+
23
+ item = self.fc.create_item(display_name=self.item_name, type=self.item_type, workspace_id=self.workspace_id)
24
+ self.assertEqual(item.display_name, self.item_name)
25
+ self.assertEqual(item.type, self.item_type)
26
+ self.assertEqual(item.workspace_id, self.workspace_id)
27
+ self.assertEqual(item.description, "")
28
+
29
+ item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
30
+ item_ = self.fc.get_item(workspace_id=self.workspace_id,
31
+ item_name=self.item_name, item_type=self.item_type)
32
+ self.assertEqual(item.id, item_.id)
33
+ self.assertEqual(item.display_name, self.item_name)
34
+ self.assertEqual(item.type, self.item_type)
35
+ self.assertEqual(item.workspace_id, self.workspace_id)
36
+ self.assertEqual(item.description, "")
37
+
38
+ item_list = self.fc.list_items(workspace_id=self.workspace_id)
39
+ self.assertTrue(len(item_list) > 0)
40
+
41
+ item_ids = [item_.id for item_ in item_list]
42
+ self.assertIn(item.id, item_ids)
43
+
44
+ self.fc.update_item(workspace_id=self.workspace_id, item_id=item.id, display_name=f"u{self.item_name}", return_item=True)
45
+ item = self.fc.get_item(workspace_id=self.workspace_id, item_id=item.id)
46
+ self.assertEqual(item.display_name, f"u{self.item_name}")
47
+
48
+ status_code = self.fc.delete_item(workspace_id=self.workspace_id, item_id=item.id)
49
+
50
+ self.assertAlmostEqual(status_code, 200)
51
+
52
+ def test_item_definition(self):
53
+
54
+ sjd = self.fc.get_item(workspace_id=self.workspace_id, item_name="blubb", item_type="SparkJobDefinition")
55
+ self.assertIsNotNone(sjd.definition)
56
+ datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
57
+ blubb2 = "blubb2" + datetime_str
58
+ blubb3 = "blubb3" + datetime_str
59
+ blubb2 = self.fc.create_item(display_name=blubb2, type="SparkJobDefinition", workspace_id=self.workspace_id,
60
+ definition=sjd.definition)
61
+
62
+ blubb3 = self.fc.create_item(display_name=blubb3, type="SparkJobDefinition", workspace_id=self.workspace_id)
63
+
64
+ response = self.fc.update_item_definition(workspace_id=self.workspace_id,
65
+ item_id=blubb3.id, definition=sjd.definition)
66
+
67
+ self.assertEqual(response.status_code, 200)
68
+ blubb3 = self.fc.get_item(workspace_id=self.workspace_id, item_id=blubb3.id)
69
+ self.assertEqual(blubb3.definition, sjd.definition)
70
+
71
+ self.assertNotEqual(blubb2.id, sjd.id)
72
+ self.assertEqual(blubb2.definition, sjd.definition)
73
+ self.assertNotEqual(blubb2.id, blubb3.id)
74
+
75
+ blubb2.delete()
76
+ blubb3.delete()
77
+
78
+
79
+ if __name__ == "__main__":
80
+ unittest.main()
@@ -0,0 +1,50 @@
1
+ import unittest
2
+ from datetime import datetime
3
+ from dotenv import load_dotenv
4
+ from time import sleep
5
+ from msfabricpysdkcore.coreapi import FabricClientCore
6
+
7
+ load_dotenv()
8
+
9
+ class TestFabricClientCore(unittest.TestCase):
10
+
11
+ def __init__(self, *args, **kwargs):
12
+ super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
+ #load_dotenv()
14
+ self.fc = FabricClientCore()
15
+ self.workspace_id = "c3352d34-0b54-40f0-b204-cc964b1beb8d"
16
+
17
+ datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
18
+ self.item_name = "testitem" + datetime_str
19
+ self.item_type = "Notebook"
20
+
21
+ def test_kql_querysets(self):
22
+
23
+ fc = self.fc
24
+ workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
25
+
26
+ kql_queryset_name = "kqlqueryset1"
27
+
28
+ kql_querysets = fc.list_kql_querysets(workspace_id)
29
+ kql_queryset_names = [kqlq.display_name for kqlq in kql_querysets]
30
+ self.assertGreater(len(kql_querysets), 0)
31
+ self.assertIn(kql_queryset_name, kql_queryset_names)
32
+
33
+ kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_name=kql_queryset_name)
34
+ self.assertIsNotNone(kqlq.id)
35
+ self.assertEqual(kqlq.display_name, kql_queryset_name)
36
+
37
+ kqlq2 = fc.update_kql_queryset(workspace_id, kqlq.id, display_name=f"{kql_queryset_name}2", return_item=True)
38
+
39
+ kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
40
+ self.assertEqual(kqlq.display_name, f"{kql_queryset_name}2")
41
+ self.assertEqual(kqlq.id, kqlq2.id)
42
+
43
+ kqlq2 = fc.update_kql_queryset(workspace_id, kqlq.id, display_name=kql_queryset_name, return_item=True)
44
+
45
+ kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_id=kqlq.id)
46
+ self.assertEqual(kqlq.display_name, kql_queryset_name)
47
+ self.assertEqual(kqlq.id, kqlq2.id)
48
+
49
+ # status_code = fc.delete_kql_queryset(workspace_id, kqlq.id)
50
+ # self.assertEqual(status_code, 200)
@@ -38,7 +38,7 @@ class TestFabricClientCore(unittest.TestCase):
38
38
  self.assertEqual(kqldb.display_name, kqldb_name)
39
39
 
40
40
  new_name = kqldb_name+"2"
41
- kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name=new_name)
41
+ kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name=new_name, return_item=True)
42
42
 
43
43
  kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
44
44
  self.assertEqual(kqldb.display_name, new_name)
@@ -0,0 +1,86 @@
1
+ import unittest
2
+ from datetime import datetime
3
+ from dotenv import load_dotenv
4
+ from time import sleep
5
+ from msfabricpysdkcore.coreapi import FabricClientCore
6
+
7
+ load_dotenv()
8
+
9
+ class TestFabricClientCore(unittest.TestCase):
10
+
11
+ def __init__(self, *args, **kwargs):
12
+ super(TestFabricClientCore, self).__init__(*args, **kwargs)
13
+ #load_dotenv()
14
+ self.fc = FabricClientCore()
15
+ self.workspace_id = "c3352d34-0b54-40f0-b204-cc964b1beb8d"
16
+
17
+ datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
18
+ self.item_name = "testitem" + datetime_str
19
+ self.item_type = "Notebook"
20
+
21
+ def test_lakehouse(self):
22
+
23
+ lakehouse = self.fc.get_item(workspace_id=self.workspace_id, item_name="lakehouse1", item_type="Lakehouse")
24
+ self.assertIsNotNone(lakehouse.properties)
25
+ lakehouse_id = lakehouse.id
26
+ workspace_id = self.workspace_id
27
+ date_str = datetime.now().strftime("%Y%m%d%H%M%S")
28
+ table_name = f"table{date_str}"
29
+
30
+
31
+ status_code = self.fc.load_table(workspace_id=self.workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
32
+ path_type="File", relative_path="Files/folder1/titanic.csv")
33
+
34
+ self.assertEqual(status_code, 202)
35
+
36
+ # Run on demand table maintenance
37
+ table_name_maintenance = "table20240515114529"
38
+
39
+ execution_data = {
40
+ "tableName": table_name_maintenance,
41
+ "optimizeSettings": {
42
+ "vOrder": True,
43
+ "zOrderBy": [
44
+ "tipAmount"
45
+ ]
46
+ },
47
+ "vacuumSettings": {
48
+ "retentionPeriod": "7:01:00:00"
49
+ }
50
+ }
51
+
52
+ response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
53
+ execution_data = execution_data,
54
+ job_type = "TableMaintenance", wait_for_completion = False)
55
+ self.assertIn(response.status_code, [200, 202])
56
+
57
+ table_list = self.fc.list_tables(workspace_id=self.workspace_id, lakehouse_id=lakehouse_id)
58
+ table_names = [table["name"] for table in table_list]
59
+
60
+ self.assertIn(table_name, table_names)
61
+
62
+ fc = self.fc
63
+ workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
64
+
65
+ lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name="lakehouse2")
66
+ self.assertIsNotNone(lakehouse.id)
67
+
68
+ lakehouses = fc.list_lakehouses(workspace_id)
69
+ lakehouse_names = [lh.display_name for lh in lakehouses]
70
+ self.assertGreater(len(lakehouse_names), 0)
71
+ self.assertIn("lakehouse2", lakehouse_names)
72
+
73
+ lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
74
+ self.assertEqual(lakehouse.id, lakehouse2.id)
75
+
76
+ sleep(20)
77
+ lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name="lakehouse3", return_item=True)
78
+ self.assertEqual(lakehouse2.display_name, "lakehouse3")
79
+
80
+ id = lakehouse2.id
81
+
82
+ lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_name="lakehouse3")
83
+ self.assertEqual(lakehouse2.id, id)
84
+
85
+ status_code = fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
86
+ self.assertEqual(status_code, 200)