msfabricpysdkcore 0.0.9__py3-none-any.whl → 0.0.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. msfabricpysdkcore/admin_item.py +7 -0
  2. msfabricpysdkcore/admin_workspace.py +20 -1
  3. msfabricpysdkcore/adminapi.py +133 -7
  4. msfabricpysdkcore/auth.py +9 -6
  5. msfabricpysdkcore/client.py +5 -4
  6. msfabricpysdkcore/coreapi.py +341 -17
  7. msfabricpysdkcore/deployment_pipeline.py +240 -0
  8. msfabricpysdkcore/environment.py +209 -0
  9. msfabricpysdkcore/item.py +12 -11
  10. msfabricpysdkcore/lakehouse.py +42 -1
  11. msfabricpysdkcore/long_running_operation.py +2 -6
  12. msfabricpysdkcore/otheritems.py +122 -3
  13. msfabricpysdkcore/spark_custom_pool.py +118 -0
  14. msfabricpysdkcore/tests/test_admin_apis.py +20 -9
  15. msfabricpysdkcore/tests/test_datapipelines.py +48 -0
  16. msfabricpysdkcore/tests/test_deployment_pipeline.py +64 -0
  17. msfabricpysdkcore/tests/test_domains.py +3 -2
  18. msfabricpysdkcore/tests/test_environments.py +65 -0
  19. msfabricpysdkcore/tests/test_evenstreams.py +44 -0
  20. msfabricpysdkcore/tests/test_git.py +3 -1
  21. msfabricpysdkcore/tests/test_items_incl_lakehouse.py +81 -109
  22. msfabricpysdkcore/tests/test_jobs.py +4 -0
  23. msfabricpysdkcore/tests/test_kqldatabases.py +48 -0
  24. msfabricpysdkcore/tests/test_shortcuts.py +3 -1
  25. msfabricpysdkcore/tests/test_spark.py +91 -0
  26. msfabricpysdkcore/tests/test_workspaces_capacities.py +6 -5
  27. msfabricpysdkcore/workspace.py +358 -32
  28. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/METADATA +82 -32
  29. msfabricpysdkcore-0.0.11.dist-info/RECORD +38 -0
  30. msfabricpysdkcore-0.0.9.dist-info/RECORD +0 -29
  31. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/LICENSE +0 -0
  32. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/WHEEL +0 -0
  33. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/top_level.txt +0 -0
msfabricpysdkcore/environment.py ADDED
@@ -0,0 +1,209 @@
+ import json
+ import requests
+ from time import sleep
+
+ from msfabricpysdkcore.item import Item
+ from msfabricpysdkcore.long_running_operation import check_long_running_operation
+
+ class Environment(Item):
+     """Class to represent an environment in Microsoft Fabric"""
+
+     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
+         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+
+     def from_dict(item_dict, auth):
+         return Environment(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                            properties=item_dict.get('properties', None),
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""),
+                            auth=auth)
+
+     # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/sparkcompute
+     def get_published_settings(self):
+         """Get the published settings of the environment"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/sparkcompute"
+
+         for _ in range(10):
+             response = requests.get(url=url, headers=self.auth.get_headers())
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error getting published settings: {response.status_code}, {response.text}")
+             break
+
+         resp_json = json.loads(response.text)
+         return resp_json
+
+     # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/sparkcompute
+
+     def get_staging_settings(self):
+         """Get the staging settings of the environment"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/sparkcompute"
+
+         for _ in range(10):
+             response = requests.get(url=url, headers=self.auth.get_headers())
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error getting staging settings: {response.status_code}, {response.text}")
+             break
+
+         resp_json = json.loads(response.text)
+         return resp_json
+
+
+     def update_staging_settings(self,
+                                 driver_cores = None, driver_memory = None, dynamic_executor_allocation = None,
+                                 executor_cores = None, executor_memory = None, instance_pool = None,
+                                 runtime_version = None, spark_properties = None):
+         """Update the staging settings of the environment"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/sparkcompute"
+         body = {}
+         if driver_cores is not None:
+             body['driverCores'] = driver_cores
+         if driver_memory is not None:
+             body['driverMemory'] = driver_memory
+         if dynamic_executor_allocation is not None:
+             body['dynamicExecutorAllocation'] = dynamic_executor_allocation
+         if executor_cores is not None:
+             body['executorCores'] = executor_cores
+         if executor_memory is not None:
+             body['executorMemory'] = executor_memory
+         if instance_pool is not None:
+             body['instancePool'] = instance_pool
+         if runtime_version is not None:
+             body['runtimeVersion'] = runtime_version
+         if spark_properties is not None:
+             body['sparkProperties'] = spark_properties
+
+
+         for _ in range(10):
+             response = requests.patch(url=url, headers=self.auth.get_headers(), json=body)
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error updating staging settings: {response.status_code}, {response.text}")
+             break
+
+         return json.loads(response.text)
+
+     # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/libraries
+
+     def get_published_libraries(self):
+         """Get the published libraries of the environment"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/libraries"
+
+         for _ in range(10):
+             response = requests.get(url=url, headers=self.auth.get_headers())
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error getting published libraries: {response.status_code}, {response.text}")
+             break
+
+         resp_json = json.loads(response.text)
+         return resp_json
+
+     # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries
+
+     def get_staging_libraries(self):
+         """Get the staging libraries of the environment"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/libraries"
+
+         for _ in range(10):
+             response = requests.get(url=url, headers=self.auth.get_headers())
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error getting staging libraries: {response.status_code}, {response.text}")
+             break
+
+         resp_json = json.loads(response.text)
+         return resp_json
+
+
+     def upload_staging_library(self, file_path):
+         # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries
+         raise NotImplementedError("Not implemented yet")
+         # url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/libraries"
+
+         # for _ in range(10):
+         #     response = requests.post(url=url, files={'file': file_path}, headers=self.auth.get_headers())
+         #     if response.status_code == 429:
+         #         print("Too many requests, waiting 10 seconds")
+         #         sleep(10)
+         #         continue
+         #     if response.status_code not in (200, 429):
+         #         raise Exception(f"Error uploading staging libraries: {response.status_code}, {response.text}")
+         #     break
+
+         # return json.loads(response.text)
+
+     # DELETE https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries?libraryToDelete={libraryToDelete}
+
+     def delete_staging_library(self, library_to_delete):
+         """Delete a library from the staging libraries of the environment"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/libraries?libraryToDelete={library_to_delete}"
+
+         for _ in range(10):
+             response = requests.delete(url=url, headers=self.auth.get_headers())
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error deleting staging libraries: {response.status_code}, {response.text}")
+             break
+
+         return response.text
+
+     # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/publish
+
+     def publish_environment(self):
+         """Publish the staging settings and libraries of the environment"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/publish"
+
+         for _ in range(10):
+             response = requests.post(url=url, headers=self.auth.get_headers())
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code == 202:
+                 publish_info = check_long_running_operation(response.headers, self.auth)
+                 return publish_info
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error publishing staging: {response.status_code}, {response.text}")
+             break
+
+         resp_dict = json.loads(response.text)
+         return resp_dict
+
+
+     # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/cancelPublish
+
+     def cancel_publish(self):
+         """Cancel the publishing of the staging settings and libraries of the environment"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/cancelPublish"
+
+         for _ in range(10):
+             response = requests.post(url=url, headers=self.auth.get_headers())
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error canceling publishing: {response.status_code}, {response.text}")
+             break
+
+         resp_dict = json.loads(response.text)
+         return resp_dict
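Every endpoint wrapper above follows the same retry contract: up to ten attempts, a 10-second sleep on HTTP 429, and an exception on any other non-success status. A minimal sketch of the staging-then-publish flow; the IDs, the auth credential object, and the setting values are illustrative assumptions, not part of this diff:

from msfabricpysdkcore.environment import Environment

# Hypothetical identifiers plus an already-authenticated credential wrapper.
env = Environment.from_dict(
    {"id": "<environment-id>", "displayName": "dev-env",
     "type": "Environment", "workspaceId": "<workspace-id>"},
    auth=auth)

# Stage new Spark compute settings, inspect them, then publish.
env.update_staging_settings(driver_cores=4, driver_memory="28g",
                            runtime_version="1.2")
print(env.get_staging_settings())    # staged values, not yet active
env.publish_environment()            # 202 responses are resolved via the long-running-operation helper
print(env.get_published_settings())  # reflects the publish once it completes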
msfabricpysdkcore/item.py CHANGED
@@ -41,11 +41,6 @@ class Item:
      def from_dict(item_dict, auth):
          """Create Item object from dictionary"""

-         if item_dict['type'] == "Lakehouse":
-             from msfabricpysdkcore.lakehouse import Lakehouse
-             return Lakehouse(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
-                              properties=item_dict.get('properties', None),
-                              definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
          return Item(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                      properties=item_dict.get('properties', None),
                      definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
@@ -63,18 +58,20 @@
                  sleep(10)
                  continue
              if response.status_code not in (200, 429):
-                 print(response.status_code)
-                 print(response.text)
-                 print(self)
-                 raise Exception(f"Error deleting item: {response.text}")
+                 raise Exception(f"Error deleting item: {response.status_code}, {response.text}")
              break

          return response.status_code

-     def get_definition(self):
+     def get_definition(self, type = None, format = None):
          """Get the definition of the item"""

          url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/items/{self.id}/getDefinition"
+         if type:
+             url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/{type}/{self.id}/getDefinition"
+
+         if format:
+             url += f"?format={format}"

          for _ in range(10):
              response = requests.post(url=url, headers=self.auth.get_headers())
@@ -127,9 +124,13 @@ class Item:
          return self

-     def update_definition(self, definition):
+     def update_definition(self, definition, type = None):
          """Update the item definition"""
          url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/items/{self.id}/updateDefinition"
+
+         if type:
+             url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/{type}/{self.id}/updateDefinition"
+
          payload = {
              'definition': definition
          }
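In short, the new type argument reroutes the call from the generic items endpoint to an item-type-specific one, and format becomes a query parameter. A sketch, assuming a notebook item and the usual Fabric response shape that carries the payload under a 'definition' key:

# Without arguments: POST .../workspaces/{ws}/items/{id}/getDefinition
# With type:         POST .../workspaces/{ws}/notebooks/{id}/getDefinition
# With format:       ...getDefinition?format=ipynb
resp = notebook.get_definition(type="notebooks", format="ipynb")

# Feed the definition back through the matching type-specific endpoint.
notebook.update_definition(resp["definition"], type="notebooks")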
msfabricpysdkcore/lakehouse.py CHANGED
@@ -3,6 +3,7 @@ import requests
  from time import sleep

  from msfabricpysdkcore.item import Item
+ from msfabricpysdkcore.long_running_operation import check_long_running_operation

  class Lakehouse(Item):
      """Class to represent a lakehouse in Microsoft Fabric"""
@@ -10,6 +11,10 @@ class Lakehouse(Item):
      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return Lakehouse(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                          properties=item_dict.get('properties', None),
+                          definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)

      def list_tables(self, continuationToken = None):
          """List all tables in the lakehouse"""
@@ -92,4 +97,40 @@ class Lakehouse(Item):

              sleep(3)
          return False
-
+
+     # run on demand table maintenance
+     # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/lakehouses/{lakehouseId}/jobs/instances?jobType={jobType}
+
+     def run_on_demand_table_maintenance(self, execution_data, job_type = "TableMaintenance", wait_for_completion = True):
+         """Run on demand table maintenance"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/lakehouses/{self.id}/jobs/instances?jobType={job_type}"
+
+         body = {
+             "executionData": execution_data
+         }
+
+         for _ in range(10):
+             response = requests.post(url=url, headers=self.auth.get_headers(), json=body)
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code == 202 and wait_for_completion:
+                 print("successfully started the operation")
+                 try:
+                     operation_result = check_long_running_operation(response.headers, self.auth)
+                     return operation_result
+                 except Exception as e:
+                     print("Problem waiting for long running operation. Returning initial response.")
+                     print(e)
+                     return response
+
+             if response.status_code not in (200, 202, 429):
+                 print(response.status_code)
+                 print(response.text)
+
+                 raise Exception(f"Error at run on demand table maintenance: {response.text}")
+             break
+
+         return response
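The execution_data dict is wrapped as {"executionData": ...} and posted as-is. The payload below follows the shape of the public Fabric table-maintenance job API; treat the exact keys and values as an assumption rather than something this diff defines:

# Illustrative maintenance payload (key names taken from the Fabric REST docs).
execution_data = {
    "tableName": "sales",
    "optimizeSettings": {"vOrder": True, "zOrderBy": ["account_id"]},
    "vacuumSettings": {"retentionPeriod": "7:01:00:00"},  # d:hh:mm:ss
}

result = lakehouse.run_on_demand_table_maintenance(
    execution_data=execution_data,
    wait_for_completion=True,  # resolves the 202 long-running operation before returning
)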
msfabricpysdkcore/long_running_operation.py CHANGED
@@ -25,9 +25,7 @@ class LongRunningOperation:
              if response.status_code == 400:
                  return None
              if response.status_code not in (200, 429):
-                 print(response.status_code)
-                 print(response.text)
-                 raise Exception(f"Error getting operation results: {response.text}")
+                 raise Exception(f"Error getting operation results: {response.status_code}, {response.text}")
              break

          return json.loads(response.text)
@@ -43,9 +41,7 @@
                  sleep(10)
                  continue
              if response.status_code not in (200, 429):
-                 print(response.status_code)
-                 print(response.text)
-                 raise Exception(f"Error getting operation state: {response.text}")
+                 raise Exception(f"Error getting operation state: {response.status_code}, {response.text}")
              break

          return json.loads(response.text)
msfabricpysdkcore/otheritems.py CHANGED
@@ -1,42 +1,111 @@
+ import json
  from time import sleep
-
+ import requests
  from msfabricpysdkcore.item import Item

+
+ class Eventhouse(Item):
+     """Class to represent an eventhouse in Microsoft Fabric"""
+
+     def __init__(self, id, display_name, type, workspace_id, auth, properties = None, description=""):
+         super().__init__(id = id, display_name=display_name, type=type,
+                          workspace_id=workspace_id, auth=auth, properties=properties,
+                          description=description)
+
+     def from_dict(item_dict, auth):
+         if "displayName" not in item_dict:
+             item_dict["displayName"] = item_dict["display_name"]
+         if "workspaceId" not in item_dict:
+             item_dict["workspaceId"] = item_dict["workspace_id"]
+
+         return Eventhouse(id=item_dict['id'], display_name=item_dict['displayName'],
+                           type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                           properties=item_dict.get('properties', None),
+                           description=item_dict.get('description', ""), auth=auth)
+
+     def create_kql_database(self, display_name = None, description= None):
+         """Method to create a kql database in the eventhouse"""
+         from msfabricpysdkcore.coreapi import FabricClientCore
+         creation_payload = {"databaseType" : "ReadWrite",
+                             "parentEventhouseItemId" : self.id}
+
+         fcc = FabricClientCore(silent=True)
+
+         return fcc.create_kql_database(workspace_id = self.workspace_id,
+                                        display_name = display_name, description = description,
+                                        creation_payload= creation_payload)
+
  class SparkJobDefinition(Item):
      """Class to represent a spark job definition in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return SparkJobDefinition(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                                   properties=item_dict.get('properties', None),
+                                   definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
+     def get_definition(self, format=None):
+         return super().get_definition(type="sparkJobDefinitions", format=format)
+
+     def update_definition(self, definition):
+         return super().update_definition(definition=definition, type="sparkJobDefinitions")
+
  class Warehouse(Item):
      """Class to represent a warehouse in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return Warehouse(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                          properties=item_dict.get('properties', None),
+                          definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
  class KQLDatabase(Item):
      """Class to represent a kql database in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return KQLDatabase(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                            properties=item_dict.get('properties', None),
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
  class KQLQueryset(Item):
      """Class to represent a kql queryset in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return KQLQueryset(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                            properties=item_dict.get('properties', None),
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
  class Eventstream(Item):
      """Class to represent an eventstream in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
-
+
+     def from_dict(item_dict, auth):
+         return Eventstream(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                            properties=item_dict.get('properties', None),
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
  class MLExperiment(Item):
      """Class to represent an ML experiment in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+
+     def from_dict(item_dict, auth):
+         return MLExperiment(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                             properties=item_dict.get('properties', None),
+                             definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)

  class MLModel(Item):
      """Class to represent an ML model in Microsoft Fabric"""
@@ -44,29 +113,79 @@ class MLModel(Item):
      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return MLModel(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                        properties=item_dict.get('properties', None),
+                        definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
  class Notebook(Item):
      """Class to represent a notebook in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return Notebook(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                         properties=item_dict.get('properties', None),
+                         definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
+     def get_definition(self, format=None):
+         """Method to get the definition of the notebook"""
+         return super().get_definition(type = "notebooks", format = format)
+
+     def update_definition(self, definition):
+         """Method to update the definition of the notebook"""
+         return super().update_definition(definition, type = "notebooks")
+
  class Report(Item):
      """Class to represent a report in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return Report(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                       properties=item_dict.get('properties', None),
+                       definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
+     def get_definition(self, type=None, format=None):
+         """Method to get the definition of the report"""
+         return super().get_definition(type = "reports", format = format)
+
+     def update_definition(self, definition):
+         """Method to update the definition of the report"""
+         return super().update_definition(definition, type = "reports")
+
  class SemanticModel(Item):
      """Class to represent a semantic model in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return SemanticModel(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                              properties=item_dict.get('properties', None),
+                              definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
+     def get_definition(self, format=None):
+         """Method to get the definition of the semantic model"""
+         return super().get_definition(type="semanticModels", format=format)
+
+     def update_definition(self, definition):
+         """Method to update the definition of the semantic model"""
+         return super().update_definition(definition, type = "semanticModels")
+
  class DataPipeline(Item):
      """Class to represent a data pipeline in Microsoft Fabric"""

      def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
          super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)

+     def from_dict(item_dict, auth):
+         return DataPipeline(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                             properties=item_dict.get('properties', None),
+                             definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+
      def run_on_demand_item_job(self, execution_data=None):
-         return super().run_on_demand_item_job(job_type = "Pipeline", execution_data=execution_data)
+         return super().run_on_demand_item_job(job_type = "Pipeline", execution_data=execution_data)
+
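The new Eventhouse.create_kql_database helper simply spins up a FabricClientCore and forwards a ReadWrite creation payload parented to the eventhouse. A sketch, assuming an eventhouse object already obtained from the SDK and an illustrative database name:

# Convenience helper on the eventhouse itself.
kqldb = eventhouse.create_kql_database(display_name="telemetry-db",
                                       description="ingest target")

# Equivalent direct call, mirroring what the helper does internally.
from msfabricpysdkcore.coreapi import FabricClientCore

fcc = FabricClientCore(silent=True)
kqldb = fcc.create_kql_database(
    workspace_id=eventhouse.workspace_id,
    display_name="telemetry-db",
    description="ingest target",
    creation_payload={"databaseType": "ReadWrite",
                      "parentEventhouseItemId": eventhouse.id})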
msfabricpysdkcore/spark_custom_pool.py ADDED
@@ -0,0 +1,118 @@
+ import json
+ from time import sleep
+
+ import requests
+
+
+ class SparkCustomPool:
+     """Class to represent a custom pool in Microsoft Fabric"""
+
+     def __init__(self, id, name, type, node_family, node_size, auto_scale, dynamic_executor_allocation, workspace_id, auth) -> None:
+
+         self.id = id
+         self.name = name
+         self.type = type
+         self.node_family = node_family
+         self.node_size = node_size
+         self.auto_scale = auto_scale
+         self.dynamic_executor_allocation = dynamic_executor_allocation
+         self.workspace_id = workspace_id
+
+         self.auth = auth
+
+     def __str__(self) -> str:
+         """Return a string representation of the custom pool object"""
+         dict_ = {
+             "id": self.id,
+             "name": self.name,
+             "type": self.type,
+             "nodeFamily": self.node_family,
+             "nodeSize": self.node_size,
+             "autoScale": self.auto_scale,
+             "dynamicExecutorAllocation": self.dynamic_executor_allocation,
+             "workspaceId": self.workspace_id
+         }
+         return json.dumps(dict_, indent=2)
+
+     def __repr__(self) -> str:
+         return self.__str__()
+
+     def from_dict(item_dict, auth):
+         """Create SparkCustomPool object from dictionary"""
+
+         if 'autoScale' not in item_dict:
+             item_dict['autoScale'] = item_dict['auto_scale']
+
+         if 'dynamicExecutorAllocation' not in item_dict:
+             item_dict['dynamicExecutorAllocation'] = item_dict['dynamic_executor_allocation']
+
+         if 'nodeFamily' not in item_dict:
+             item_dict['nodeFamily'] = item_dict['node_family']
+
+         if 'nodeSize' not in item_dict:
+             item_dict['nodeSize'] = item_dict['node_size']
+
+         return SparkCustomPool(id=item_dict['id'], name=item_dict['name'], type=item_dict['type'], node_family=item_dict['nodeFamily'],
+                                node_size=item_dict['nodeSize'], auto_scale=item_dict['autoScale'], dynamic_executor_allocation=item_dict['dynamicExecutorAllocation'],
+                                workspace_id=item_dict['workspaceId'], auth=auth)
+
+
+     def delete(self):
+         """Delete the custom pool item"""
+         # DELETE https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/spark/pools/{poolId}
+
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/spark/pools/{self.id}"
+         for _ in range(10):
+             response = requests.delete(url=url, headers=self.auth.get_headers())
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error deleting spark pool: {response.status_code}, {response.text}")
+             break
+
+         return response.status_code
+
+
+     def update(self, name, node_family, node_size, auto_scale, dynamic_executor_allocation):
+         """Update the custom pool item"""
+         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/spark/pools/{self.id}"
+         body = {}
+
+         if name is not None:
+             body['name'] = name
+         if node_family is not None:
+             body['nodeFamily'] = node_family
+         if node_size is not None:
+             body['nodeSize'] = node_size
+         if auto_scale is not None:
+             body['autoScale'] = auto_scale
+         if dynamic_executor_allocation is not None:
+             body['dynamicExecutorAllocation'] = dynamic_executor_allocation
+
+         if not body:
+             return self
+         for _ in range(10):
+             response = requests.patch(url=url, headers=self.auth.get_headers(), json=body)
+             if response.status_code == 429:
+                 print("Too many requests, waiting 10 seconds")
+                 sleep(10)
+                 continue
+             if response.status_code not in (200, 429):
+                 raise Exception(f"Error updating item: {response.status_code}, {response.text}")
+             break
+
+         if name is not None:
+             self.name = name
+         if node_family is not None:
+             self.node_family = node_family
+         if node_size is not None:
+             self.node_size = node_size
+         if auto_scale is not None:
+             self.auto_scale = auto_scale
+         if dynamic_executor_allocation is not None:
+             self.dynamic_executor_allocation = dynamic_executor_allocation
+
+         return self
+
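Unlike the keyword-optional item updaters above, SparkCustomPool.update takes all five fields positionally and skips any passed as None, then mirrors the accepted values onto the local object. A round-trip sketch; the pool dict values and the auth object are illustrative assumptions:

from msfabricpysdkcore.spark_custom_pool import SparkCustomPool

pool = SparkCustomPool.from_dict(
    {"id": "<pool-id>", "name": "analytics-pool", "type": "Workspace",
     "nodeFamily": "MemoryOptimized", "nodeSize": "Medium",
     "autoScale": {"enabled": True, "minNodeCount": 1, "maxNodeCount": 4},
     "dynamicExecutorAllocation": {"enabled": True, "minExecutors": 1,
                                   "maxExecutors": 4},
     "workspaceId": "<workspace-id>"},
    auth=auth)  # an already-authenticated credential wrapper from this SDK

# Pass None to leave a field unchanged; the local attributes stay in sync.
pool.update(name="analytics-pool-v2", node_family=None, node_size="Large",
            auto_scale=None, dynamic_executor_allocation=None)
print(pool.delete())  # returns the HTTP status code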