msfabricpysdkcore 0.0.9__py3-none-any.whl → 0.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,6 @@
+import json
 from time import sleep
-
+import requests
 from msfabricpysdkcore.item import Item
 
 class SparkJobDefinition(Item):
@@ -8,35 +9,71 @@ class SparkJobDefinition(Item):
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return SparkJobDefinition(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                                  properties=item_dict.get('properties', None),
+                                  definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
+    def get_definition(self, format=None):
+        return super().get_definition(type="sparkJobDefinitions", format=format)
+
+    def update_definition(self, definition):
+        return super().update_definition(definition=definition, type="sparkJobDefinitions")
+
 class Warehouse(Item):
     """Class to represent a warehouse in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return Warehouse(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                         properties=item_dict.get('properties', None),
+                         definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
 class KQLDatabase(Item):
     """Class to represent a kql database in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return KQLDatabase(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                           properties=item_dict.get('properties', None),
+                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
 class KQLQueryset(Item):
     """Class to represent a kql database in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return KQLQueryset(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                           properties=item_dict.get('properties', None),
+                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
 class Eventstream(Item):
     """Class to represent a eventstream in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
-
+
+    def from_dict(item_dict, auth):
+        return Eventstream(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                           properties=item_dict.get('properties', None),
+                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
 class MLExperiment(Item):
     """Class to represent a ml experiment in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+
+    def from_dict(item_dict, auth):
+        return MLExperiment(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                            properties=item_dict.get('properties', None),
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
 
 class MLModel(Item):
     """Class to represent a ml model in Microsoft Fabric"""
@@ -44,29 +81,79 @@ class MLModel(Item):
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return MLModel(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                       properties=item_dict.get('properties', None),
+                       definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
 class Notebook(Item):
     """Class to represent a notebook in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return Notebook(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                        properties=item_dict.get('properties', None),
+                        definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
+    def get_definition(self, format=None):
+        """Method to get the definition of the notebook"""
+        return super().get_definition(type = "notebooks", format = format)
+
+    def update_definition(self, definition):
+        """Method to update the definition of the notebook"""
+        return super().update_definition(definition, type = "notebooks")
+
 class Report(Item):
     """Class to represent a report in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return Report(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                      properties=item_dict.get('properties', None),
+                      definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
+    def get_definition(self, type=None, format=None):
+        """Method to get the definition of the report"""
+        return super().get_definition(type = "reports", format = format)
+
+    def update_definition(self, definition):
+        """Method to update the definition of the report"""
+        return super().update_definition(definition, type = "reports")
+
 class SemanticModel(Item):
     """Class to represent a semantic model in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return SemanticModel(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                             properties=item_dict.get('properties', None),
+                             definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
+    def get_definition(self, format=None):
+        """Method to get the definition of the semantic model"""
+        return super().get_definition(type="semanticModels", format=format)
+
+    def update_definition(self, definition):
+        """Method to update the definition of the semantic model"""
+        return super().update_definition(definition, type = "semanticModels")
+
 class DataPipeline(Item):
     """Class to represent a spark job definition in Microsoft Fabric"""
 
     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
+    def from_dict(item_dict, auth):
+        return DataPipeline(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                            properties=item_dict.get('properties', None),
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
     def run_on_demand_item_job(self, execution_data=None):
-        return super().run_on_demand_item_job(job_type = "Pipeline", execution_data=execution_data)
+        return super().run_on_demand_item_job(job_type = "Pipeline", execution_data=execution_data)
+
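All of the new `from_dict` factories follow one pattern: required keys (`id`, `displayName`, `type`, `workspaceId`) are indexed directly and raise `KeyError` if absent, while `properties`, `definition`, and `description` fall back through `dict.get`. They are also written without `self` (and without `@staticmethod`), which still works in Python 3 as long as they are called on the class rather than an instance. A minimal sketch with a made-up payload, not taken from the diff:

    # Hypothetical API payload; the keys mirror what from_dict reads.
    item_dict = {
        "id": "aaaaaaaa-0000-1111-2222-bbbbbbbbbbbb",
        "displayName": "my_notebook",
        "type": "Notebook",
        "workspaceId": "cccccccc-3333-4444-5555-dddddddddddd",
        # 'properties', 'definition', 'description' are optional and default
        # to None / None / "" via dict.get.
    }

    # Call on the class, not an instance (no self parameter).
    notebook = Notebook.from_dict(item_dict, auth=None)  # auth=None only for offline construction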
@@ -0,0 +1,118 @@
+import json
+from time import sleep
+
+import requests
+
+
+class SparkCustomPool:
+    """Class to represent a custom pool in Microsoft Fabric"""
+
+    def __init__(self, id, name, type, node_family, node_size, auto_scale, dynamic_executor_allocation, workspace_id, auth) -> None:
+
+        self.id = id
+        self.name = name
+        self.type = type
+        self.node_family = node_family
+        self.node_size = node_size
+        self.auto_scale = auto_scale
+        self.dynamic_executor_allocation = dynamic_executor_allocation
+        self.workspace_id = workspace_id
+
+        self.auth = auth
+
+    def __str__(self) -> str:
+        """Return a string representation of the workspace object"""
+        dict_ = {
+            "id": self.id,
+            "name": self.name,
+            "type": self.type,
+            "nodeFamily": self.node_family,
+            "nodeSize": self.node_size,
+            "autoScale": self.auto_scale,
+            "dynamicExecutorAllocation": self.dynamic_executor_allocation,
+            "workspaceId": self.workspace_id
+        }
+        return json.dumps(dict_, indent=2)
+
+    def __repr__(self) -> str:
+        return self.__str__()
+
+    def from_dict(item_dict, auth):
+        """Create Item object from dictionary"""
+
+        if 'autoScale' not in item_dict:
+            item_dict['autoScale'] = item_dict['auto_scale']
+
+        if 'dynamicExecutorAllocation' not in item_dict:
+            item_dict['dynamicExecutorAllocation'] = item_dict['dynamic_executor_allocation']
+
+        if 'nodeFamily' not in item_dict:
+            item_dict['nodeFamily'] = item_dict['node_family']
+
+        if 'nodeSize' not in item_dict:
+            item_dict['nodeSize'] = item_dict['node_size']
+
+        return SparkCustomPool(id=item_dict['id'], name=item_dict['name'], type=item_dict['type'], node_family=item_dict['nodeFamily'],
+                               node_size=item_dict['nodeSize'], auto_scale=item_dict['autoScale'], dynamic_executor_allocation=item_dict['dynamicExecutorAllocation'],
+                               workspace_id=item_dict['workspaceId'], auth=auth)
+
+
+    def delete(self):
+        """Delete the custom pool item"""
+        # DELETE http://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/spark/pools/{poolId}
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/spark/pools/{self.id}"
+        for _ in range(10):
+            response = requests.delete(url=url, headers=self.auth.get_headers())
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                raise Exception(f"Error deleting spark pool: {response.status_code}, {response.text}")
+            break
+
+        return response.status_code
+
+
+    def update(self, name, node_family, node_size, auto_scale, dynamic_executor_allocation):
+        """Update the custom pool item"""
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/spark/pools/{self.id}"
+        body = {}
+
+        if name is not None:
+            body['name'] = name
+        if node_family is not None:
+            body['nodeFamily'] = node_family
+        if node_size is not None:
+            body['nodeSize'] = node_size
+        if auto_scale is not None:
+            body['autoScale'] = auto_scale
+        if dynamic_executor_allocation is not None:
+            body['dynamicExecutorAllocation'] = dynamic_executor_allocation
+
+        if not body:
+            return self
+        for _ in range(10):
+            response = requests.patch(url=url, headers=self.auth.get_headers(), json=body)
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                raise Exception(f"Error updating item: {response.status_code}, {response.text}")
+            break
+
+        if name is not None:
+            self.name = name
+        if node_family is not None:
+            self.node_family = node_family
+        if node_size is not None:
+            self.node_size = node_size
+        if auto_scale is not None:
+            self.auto_scale = auto_scale
+        if dynamic_executor_allocation is not None:
+            self.dynamic_executor_allocation = dynamic_executor_allocation
+
+        return self
+
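`delete` and `update` share a throttling pattern: up to ten attempts, a 10-second sleep on HTTP 429, an exception on any other non-200 status, and a `break` on success. Note also that `update` declares all five fields positionally with no defaults, so callers must pass every one; `None` means "leave unchanged". A usage sketch; the payload shape (especially the nested `autoScale`/`dynamicExecutorAllocation` dicts) and the `my_auth` object are illustrative assumptions, not taken from the diff:

    pool_dict = {
        "id": "11111111-2222-3333-4444-555555555555",
        "name": "mypool",
        "type": "Custom",
        "nodeFamily": "MemoryOptimized",
        "nodeSize": "Small",
        "autoScale": {"enabled": True, "minNodeCount": 1, "maxNodeCount": 3},          # assumed shape
        "dynamicExecutorAllocation": {"enabled": True, "minExecutors": 1, "maxExecutors": 2},  # assumed shape
        "workspaceId": "66666666-7777-8888-9999-000000000000",
    }

    pool = SparkCustomPool.from_dict(pool_dict, auth=my_auth)  # my_auth must expose get_headers()
    pool.update(name="mypool2", node_family=None, node_size=None,
                auto_scale=None, dynamic_executor_allocation=None)  # only the name changes
    pool.delete()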
@@ -1,16 +1,17 @@
 import unittest
-#from dotenv import load_dotenv
+from dotenv import load_dotenv
 from msfabricpysdkcore import FabricClientAdmin
 
+load_dotenv()
 
 class TestFabricClientCore(unittest.TestCase):
 
     def __init__(self, *args, **kwargs):
         super(TestFabricClientCore, self).__init__(*args, **kwargs)
-
+        self.fca = FabricClientAdmin()
 
-    def test_domains(self):
-        fca = FabricClientAdmin()
+    def test_admin_api(self):
+        fca = self.fca
 
         user_id = 'b4f4e299-e6e1-4667-886c-57e4a8dde1c2'
 
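Across the test suite this release switches from a commented-out `load_dotenv()` to an active one, so credentials are read from a local `.env` file rather than a pre-populated shell environment. A sketch of the check this implies; the variable names are an assumption about the SDK's service-principal authentication, not something the diff shows:

    import os

    # Hypothetical variable names -- adjust to whatever the SDK actually reads.
    for var in ("FABRIC_TENANT_ID", "FABRIC_CLIENT_ID", "FABRIC_CLIENT_SECRET"):
        if not os.environ.get(var):
            raise RuntimeError(f"{var} is not set; did load_dotenv() find your .env file?")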
@@ -26,24 +27,24 @@ class TestFabricClientCore(unittest.TestCase):
 
         # Get workspace access details
 
-        ws_access = fca.get_workspace_access_details(ws.id)
+        ws_access = fca.list_workspace_access_details(ws.id)
         principials = ws_access["accessDetails"]
         principials_ids = [p["principal"]["id"] for p in principials]
         self.assertIn(user_id, principials_ids)
 
         # Get access entities
 
-        access_entities = fca.get_access_entities(user_id, type="Notebook")
+        access_entities = fca.list_access_entities(user_id, type="Notebook")
         self.assertGreater(len(access_entities), 0)
 
         # Get tenant settings
 
-        tenant_settings = fca.get_tenant_settings()
+        tenant_settings = fca.list_tenant_settings()
         self.assertGreater(len(tenant_settings["tenantSettings"]), 0)
 
         # Get capacity tenant settings overrides
 
-        overrides = fca.get_capacities_tenant_settings_overrides()
+        overrides = fca.list_capacities_tenant_settings_overrides()
         self.assertGreater(len(overrides), -1)
 
         # List items
@@ -58,7 +59,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         # Get item access details
 
-        item_access = fca.get_item_access_details(workspace_id=ws.id, item_id=item_list[0].id)
+        item_access = fca.list_item_access_details(workspace_id=ws.id, item_id=item_list[0].id)
         principials = item_access["accessDetails"]
 
         principials_ids = [p["principal"]["id"] for p in principials]
@@ -66,3 +67,13 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertIn(user_id, principials_ids)
 
 
+    def test_labels(self):
+
+        fca = self.fca
+
+        items = [{"id": "d417b834-d381-454c-9cf0-c491f69508de", "type": "Lakehouse"}]
+        label_id = "defa4170-0d19-0005-000a-bc88714345d2"
+        resp = fca.bulk_set_labels(items=items, label_id=label_id)
+        self.assertEqual(resp["itemsChangeLabelStatus"][0]["status"], "Succeeded")
+        resp = fca.bulk_remove_labels(items=items)
+        self.assertEqual(resp["itemsChangeLabelStatus"][0]["status"], "Succeeded")
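`test_labels` exercises the new admin bulk-label methods end to end: apply a sensitivity label to a batch of items, then remove it, asserting the per-item status both times. Outside the test harness the same calls look like this (IDs are placeholders):

    from msfabricpysdkcore import FabricClientAdmin

    fca = FabricClientAdmin()
    items = [{"id": "<item-guid>", "type": "Lakehouse"}]

    resp = fca.bulk_set_labels(items=items, label_id="<sensitivity-label-guid>")
    for item_status in resp["itemsChangeLabelStatus"]:  # one entry per submitted item
        print(item_status["status"])                    # "Succeeded" on success

    fca.bulk_remove_labels(items=items)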
@@ -0,0 +1,64 @@
+import unittest
+from msfabricpysdkcore.coreapi import FabricClientCore
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fc = FabricClientCore()
+
+
+
+    def test_spark_workspace_custom_pools(self):
+        fc = self.fc
+
+        dep_pipes = fc.list_deployment_pipelines()
+
+        self.assertGreater(len(dep_pipes), 0)
+
+        self.assertIn("sdkpipe", [pipe.display_name for pipe in dep_pipes])
+
+        for pipe in dep_pipes:
+            if pipe.display_name == 'sdkpipe':
+                pipe_id = pipe.id
+                break
+
+        pipe = fc.get_deployment_pipeline(pipe_id)
+
+        self.assertEqual(pipe.display_name, 'sdkpipe')
+        self.assertEqual(pipe.id, pipe_id)
+
+        stages = fc.get_deployment_pipeline_stages(pipe_id)
+
+        self.assertGreater(len(stages), 0)
+        names = [stage.display_name for stage in stages]
+        self.assertIn("Development", names)
+        self.assertIn("Production", names)
+
+        dev_stage = [stage for stage in stages if stage.display_name == "Development"][0]
+        prod_stage = [stage for stage in stages if stage.display_name == "Production"][0]
+
+        items = fc.get_deployment_pipeline_stages_items(pipeline_id=pipe_id, stage_id=dev_stage.id)
+
+        self.assertGreater(len(items), 0)
+        self.assertIn("cicdlakehouse", [item["itemDisplayName"] for item in items])
+
+        items = [item for item in dev_stage.get_items() if item["itemDisplayName"] == 'cicdlakehouse']
+        item = items[0]
+        item = {"sourceItemId": item["itemId"],
+                "itemType": item["itemType"]}
+        items = [item]
+
+
+        response = pipe.deploy(source_stage_id=dev_stage.id, target_stage_id=prod_stage.id, items=items)
+
+        self.assertEqual(response["status"], "Succeeded")
+
+
+
+
+
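Despite its name, `test_spark_workspace_custom_pools` in this new file actually covers deployment pipelines: it resolves a pipeline by display name, locates its Development and Production stages, and promotes one lakehouse between them. Stripped of assertions, the deploy call reduces to (IDs are placeholders):

    items = [{"sourceItemId": "<item-guid-in-source-stage>", "itemType": "Lakehouse"}]
    response = pipe.deploy(source_stage_id=dev_stage.id,
                           target_stage_id=prod_stage.id,
                           items=items)
    # The test asserts on response["status"], so deploy() evidently returns the
    # finished operation's state rather than just an acknowledgement.
    assert response["status"] == "Succeeded"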
@@ -1,9 +1,10 @@
 import unittest
-#from dotenv import load_dotenv
-from datetime import datetime
+from dotenv import load_dotenv
 from msfabricpysdkcore import FabricClientCore, FabricClientAdmin
 
 
+load_dotenv()
+
 class TestFabricClientCore(unittest.TestCase):
 
     def __init__(self, *args, **kwargs):
@@ -0,0 +1,48 @@
+import unittest
+from datetime import datetime
+from dotenv import load_dotenv
+from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        #load_dotenv()
+        self.fc = FabricClientCore()
+        self.workspace_id = "d8a5abe0-9eed-406d-ab46-343bc57ddbe5"
+
+
+    def test_environments(self):
+        fc = self.fc
+        workspace_id = self.workspace_id
+
+        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        envname = "testitem" + datetime_str
+        env1 = fc.create_environment(workspace_id, envname, "testenv description")
+
+        env2 = fc.get_environment(workspace_id, environment_name=envname)
+
+        self.assertEqual(env1.id, env2.id)
+
+        env_list = fc.list_environments(workspace_id, with_properties=True)
+        env_names = [env.display_name for env in env_list]
+
+        self.assertIn(envname, env_names)
+
+        env3 = fc.update_environment(workspace_id, env1.id, envname + "name", "testenv description updated")
+
+        env4 = fc.get_environment(workspace_id, environment_name=env3.display_name)
+
+        self.assertEqual(env1.id, env4.id)
+
+        fc.delete_environment(workspace_id, env4.id)
+
+        env_list = fc.list_environments(workspace_id, with_properties=True)
+        env_names = [env.display_name for env in env_list]
+
+        self.assertNotIn(env4.display_name, env_names)
+
+
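This new file gives environments a full CRUD round trip. Condensed, the client surface it pins down is:

    env = fc.create_environment(workspace_id, "myenv", "a description")
    env = fc.get_environment(workspace_id, environment_name="myenv")   # lookup by name or id
    envs = fc.list_environments(workspace_id, with_properties=True)
    env = fc.update_environment(workspace_id, env.id, "myenv2", "updated description")
    fc.delete_environment(workspace_id, env.id)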
@@ -1,7 +1,9 @@
 import unittest
 from msfabricpysdkcore.coreapi import FabricClientCore
 from datetime import datetime
-#from dotenv import load_dotenv
+from dotenv import load_dotenv
+
+load_dotenv()
 
 class TestFabricClientCore(unittest.TestCase):
 
@@ -1,9 +1,11 @@
 import unittest
 from datetime import datetime
-#from dotenv import load_dotenv
+from dotenv import load_dotenv
 from time import sleep
 from msfabricpysdkcore.coreapi import FabricClientCore
 
+load_dotenv()
+
 class TestFabricClientCore(unittest.TestCase):
 
     def __init__(self, *args, **kwargs):
@@ -104,17 +106,39 @@ class TestFabricClientCore(unittest.TestCase):
 
         lakehouse = self.fc.get_item(workspace_id=self.workspace_id, item_name="lakehouse1", item_type="Lakehouse")
         self.assertIsNotNone(lakehouse.properties)
-        item_id = lakehouse.id
+        lakehouse_id = lakehouse.id
+        workspace_id = self.workspace_id
         date_str = datetime.now().strftime("%Y%m%d%H%M%S")
         table_name = f"table{date_str}"
 
 
-        status_code = self.fc.load_table(workspace_id=self.workspace_id, item_id=item_id, table_name=table_name,
+        status_code = self.fc.load_table(workspace_id=self.workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
                                          path_type="File", relative_path="Files/folder1/titanic.csv")
 
         self.assertEqual(status_code, 202)
 
-        table_list = self.fc.list_tables(workspace_id=self.workspace_id, item_id=item_id)
+        # Run on demand table maintenance
+        table_name_maintenance = "table20240515114529"
+
+        execution_data = {
+            "tableName": table_name_maintenance,
+            "optimizeSettings": {
+                "vOrder": True,
+                "zOrderBy": [
+                    "tipAmount"
+                ]
+            },
+            "vacuumSettings": {
+                "retentionPeriod": "7:01:00:00"
+            }
+        }
+
+        response = self.fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
+                                                           execution_data = execution_data,
+                                                           job_type = "TableMaintenance", wait_for_completion = True)
+        self.assertIn(response.status_code, [200, 202])
+
+        table_list = self.fc.list_tables(workspace_id=self.workspace_id, lakehouse_id=lakehouse_id)
         table_names = [table["name"] for table in table_list]
 
         self.assertIn(table_name, table_names)
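The new maintenance block pairs an optimize pass (V-Order plus a Z-Order column) with a vacuum; the `retentionPeriod` of `"7:01:00:00"` appears to be a `d:hh:mm:ss` duration, i.e. seven days and one hour, though the diff itself does not spell the format out. Reduced to a standalone call (IDs are placeholders):

    execution_data = {
        "tableName": "my_table",
        "optimizeSettings": {"vOrder": True, "zOrderBy": ["tipAmount"]},
        "vacuumSettings": {"retentionPeriod": "7:01:00:00"},
    }
    response = fc.run_on_demand_table_maintenance(
        workspace_id="<workspace-guid>", lakehouse_id="<lakehouse-guid>",
        execution_data=execution_data, job_type="TableMaintenance",
        wait_for_completion=True)
    assert response.status_code in (200, 202)  # mirrors the test's acceptance check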
@@ -163,6 +187,11 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertIsNotNone(dp.definition)
         self.assertEqual(dp.display_name, "pipeline1")
 
+        dp_new = fc.create_data_pipeline(workspace_id, display_name="pipeline_new", description="asda")
+        dp_new.update_definition(dp.definition)
+
+        self.assertEqual(dp_new.display_name, "pipeline_new")
+
         dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2")
 
         dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
@@ -174,8 +203,8 @@ class TestFabricClientCore(unittest.TestCase):
         dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
         self.assertEqual(dp.display_name, "pipeline1")
         self.assertEqual(dp.id, dp2.id)
-        # status_code = fc.delete_data_pipeline(workspace_id, dp.id)
-        # self.assertEqual(status_code, 200)
+        status_code = fc.delete_data_pipeline(workspace_id, dp_new.id)
+        self.assertEqual(status_code, 200)
 
     def test_eventstreams(self):
 
@@ -222,14 +251,24 @@ class TestFabricClientCore(unittest.TestCase):
         kqldb = fc.get_kql_database(workspace_id, kql_database_name="kqldatabase1")
         self.assertIsNotNone(kqldb.id)
         self.assertEqual(kqldb.display_name, "kqldatabase1")
-
+
         kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldb2")
 
         kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
         self.assertEqual(kqldb.display_name, "kqldb2")
         self.assertEqual(kqldb.id, kqldb2.id)
 
-        kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldatabase1")
+        # retry 3 times
+        i = 0
+        while i < 3:
+            try:
+                kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldatabase1")
+                break
+            except Exception as e:
+                sleep(10)
+                i += 1
+
+
 
         kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
         self.assertEqual(kqldb.display_name, "kqldatabase1")
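The inline `while`/`try` above retries the rename at most three times with a fixed 10-second pause, but swallows the final failure; if every attempt fails, it is the `assertEqual` on the display name afterwards that surfaces the problem. The same idea as a reusable helper that re-raises instead (a sketch, not part of the SDK):

    from time import sleep

    def retry(fn, attempts=3, delay=10):
        """Call fn(), retrying on any exception; re-raise after the last attempt."""
        for i in range(attempts):
            try:
                return fn()
            except Exception:
                if i == attempts - 1:
                    raise
                sleep(delay)

    # kqldb2 = retry(lambda: fc.update_kql_database(workspace_id, kqldb.id,
    #                                               display_name="kqldatabase1"))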
@@ -335,8 +374,12 @@ class TestFabricClientCore(unittest.TestCase):
         notebook_name = "notebook125"
 
         notebook_w_content = fc.get_notebook(workspace_id, notebook_name="HelloWorld")
-        definition = notebook_w_content.definition
 
+        definition = fc.get_notebook_definition(workspace_id, notebook_w_content.id)
+
+        self.assertIsNotNone(definition)
+        self.assertIn("definition", definition)
+        definition = definition["definition"]
         notebook = fc.create_notebook(workspace_id, definition=definition, display_name=notebook_name)
         fc.update_notebook_definition(workspace_id, notebook.id, definition=definition)
         notebook = fc.get_notebook(workspace_id, notebook_id=notebook.id)
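From here on, the notebook, report, semantic model, and spark job definition tests all adopt the same round trip: fetch a definition through the new `get_*_definition` client methods, unwrap the `"definition"` key from the response, and feed it to the create/update calls. For notebooks (with `source` standing in for the fetched item):

    resp = fc.get_notebook_definition(workspace_id, source.id)
    definition = resp["definition"]  # the response wraps the payload in a "definition" key

    clone = fc.create_notebook(workspace_id, definition=definition, display_name="copy")
    fc.update_notebook_definition(workspace_id, clone.id, definition=definition)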
@@ -370,7 +413,13 @@ class TestFabricClientCore(unittest.TestCase):
         report_name = "report1234"
 
         report_w_content = fc.get_report(workspace_id, report_name="HelloWorldReport")
-        definition = report_w_content.definition
+
+        definition = fc.get_report_definition(workspace_id, report_w_content.id)
+
+        self.assertIsNotNone(definition)
+        self.assertIn("definition", definition)
+        definition = definition["definition"]
+
         report = fc.create_report(workspace_id, display_name=report_name, definition=definition)
         fc.update_report_definition(workspace_id, report.id, definition=definition)
         report = fc.get_report(workspace_id, report_id=report.id)
@@ -398,7 +447,12 @@ class TestFabricClientCore(unittest.TestCase):
         semantic_model_name = "semanticmodel1234"
 
         semantic_model_w_content = fc.get_semantic_model(workspace_id, semantic_model_name="Table")
-        definition = semantic_model_w_content.definition
+
+        definition = fc.get_semantic_model_definition(workspace_id, semantic_model_w_content.id)
+
+        self.assertIsNotNone(definition)
+        self.assertIn("definition", definition)
+        definition = definition["definition"]
         semantic_model = fc.create_semantic_model(workspace_id, display_name=semantic_model_name, definition=definition)
         fc.update_semantic_model_definition(workspace_id, semantic_model.id, definition=definition)
         semantic_model = fc.get_semantic_model(workspace_id, semantic_model_id=semantic_model.id)
@@ -428,7 +482,13 @@ class TestFabricClientCore(unittest.TestCase):
         spark_job_definition_name = f"sjd{datetime_str}"
 
         spark_job_definition_w_content = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")
-        definition = spark_job_definition_w_content.definition
+
+        definition = fc.get_spark_job_definition_definition(workspace_id, spark_job_definition_w_content.id)
+
+        self.assertIsNotNone(definition)
+        self.assertIn("definition", definition)
+        definition = definition["definition"]
+
         spark_job_definition = fc.create_spark_job_definition(workspace_id, display_name=spark_job_definition_name)
         fc.update_spark_job_definition_definition(workspace_id, spark_job_definition.id, definition=definition)
         spark_job_definition = fc.get_spark_job_definition(workspace_id, spark_job_definition_id=spark_job_definition.id)
@@ -1,5 +1,9 @@
 import unittest
 from msfabricpysdkcore.coreapi import FabricClientCore
+from dotenv import load_dotenv
+
+load_dotenv()
+
 
 class TestFabricClientCore(unittest.TestCase):
 