msfabricpysdkcore 0.0.13__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. msfabricpysdkcore/__init__.py +2 -1
  2. msfabricpysdkcore/admin_item.py +19 -45
  3. msfabricpysdkcore/admin_workspace.py +13 -60
  4. msfabricpysdkcore/adminapi.py +401 -476
  5. msfabricpysdkcore/auth.py +10 -6
  6. msfabricpysdkcore/client.py +124 -7
  7. msfabricpysdkcore/coreapi.py +2570 -822
  8. msfabricpysdkcore/deployment_pipeline.py +34 -146
  9. msfabricpysdkcore/domain.py +20 -219
  10. msfabricpysdkcore/environment.py +13 -172
  11. msfabricpysdkcore/fabric_azure_capacity.py +77 -0
  12. msfabricpysdkcore/fabric_azure_client.py +228 -0
  13. msfabricpysdkcore/item.py +55 -331
  14. msfabricpysdkcore/job_instance.py +8 -22
  15. msfabricpysdkcore/lakehouse.py +9 -118
  16. msfabricpysdkcore/long_running_operation.py +7 -37
  17. msfabricpysdkcore/onelakeshortcut.py +7 -21
  18. msfabricpysdkcore/otheritems.py +66 -91
  19. msfabricpysdkcore/spark_custom_pool.py +7 -47
  20. msfabricpysdkcore/tests/test_admin_apis.py +9 -10
  21. msfabricpysdkcore/tests/test_datapipelines.py +15 -18
  22. msfabricpysdkcore/tests/test_deployment_pipeline.py +3 -3
  23. msfabricpysdkcore/tests/test_domains.py +6 -5
  24. msfabricpysdkcore/tests/test_environments.py +54 -5
  25. msfabricpysdkcore/tests/test_evenhouses.py +47 -0
  26. msfabricpysdkcore/tests/test_evenstreams.py +20 -20
  27. msfabricpysdkcore/tests/test_external_data_shares.py +3 -3
  28. msfabricpysdkcore/tests/test_fabric_azure_client.py +78 -0
  29. msfabricpysdkcore/tests/test_git.py +8 -9
  30. msfabricpysdkcore/tests/test_items.py +81 -0
  31. msfabricpysdkcore/tests/test_jobs.py +2 -2
  32. msfabricpysdkcore/tests/test_kql_queryset.py +49 -0
  33. msfabricpysdkcore/tests/test_kqldatabases.py +3 -3
  34. msfabricpysdkcore/tests/test_lakehouse.py +84 -0
  35. msfabricpysdkcore/tests/test_ml_experiments.py +47 -0
  36. msfabricpysdkcore/tests/test_ml_models.py +47 -0
  37. msfabricpysdkcore/tests/test_notebooks.py +57 -0
  38. msfabricpysdkcore/tests/test_one_lake_data_access_security.py +2 -4
  39. msfabricpysdkcore/tests/test_other_items.py +45 -0
  40. msfabricpysdkcore/tests/test_reports.py +52 -0
  41. msfabricpysdkcore/tests/test_semantic_model.py +50 -0
  42. msfabricpysdkcore/tests/test_shortcuts.py +4 -4
  43. msfabricpysdkcore/tests/test_spark.py +9 -9
  44. msfabricpysdkcore/tests/test_sparkjobdefinition.py +2 -2
  45. msfabricpysdkcore/tests/test_warehouses.py +50 -0
  46. msfabricpysdkcore/tests/test_workspaces_capacities.py +16 -13
  47. msfabricpysdkcore/workspace.py +397 -1163
  48. {msfabricpysdkcore-0.0.13.dist-info → msfabricpysdkcore-0.1.2.dist-info}/METADATA +72 -10
  49. msfabricpysdkcore-0.1.2.dist-info/RECORD +55 -0
  50. {msfabricpysdkcore-0.0.13.dist-info → msfabricpysdkcore-0.1.2.dist-info}/WHEEL +1 -1
  51. msfabricpysdkcore-0.0.13.dist-info/RECORD +0 -41
  52. {msfabricpysdkcore-0.0.13.dist-info → msfabricpysdkcore-0.1.2.dist-info}/LICENSE +0 -0
  53. {msfabricpysdkcore-0.0.13.dist-info → msfabricpysdkcore-0.1.2.dist-info}/top_level.txt +0 -0
msfabricpysdkcore/long_running_operation.py
@@ -1,54 +1,24 @@
-import json
-import requests
 from time import sleep, time
+from msfabricpysdkcore.coreapi import FabricClientCore
 
 class LongRunningOperation:
     """Class to represent a workspace in Microsoft Fabric"""
 
-    def __init__(self, operation_id, auth) -> None:
+    def __init__(self, operation_id, core_client: FabricClientCore) -> None:
         self.operation_id = operation_id
-        self.auth = auth
+        self.core_client = core_client
 
         self.state = self.get_operation_state()["status"]
 
 
     def get_operation_results(self):
-        """Get the results of an operation"""
-        url = f"https://api.fabric.microsoft.com/v1/operations/{self.operation_id}/result"
-
-        for _ in range(10):
-            response = requests.get(url=url, headers=self.auth.get_headers())
-            if response.status_code == 429:
-                print("Too many requests, waiting 10 seconds")
-                sleep(10)
-                continue
-            if response.status_code == 400:
-                return None
-            if response.status_code not in (200, 429):
-                raise Exception(f"Error getting operation results: {response.status_code}, {response.text}")
-            break
-
-        return json.loads(response.text)
+        return self.core_client.get_operation_results(operation_id=self.operation_id)
 
     def get_operation_state(self):
-        """Get the state of an operation"""
-        url = f"https://api.fabric.microsoft.com/v1/operations/{self.operation_id}"
-
-        for _ in range(10):
-            response = requests.get(url=url, headers=self.auth.get_headers())
-            if response.status_code == 429:
-                print("Too many requests, waiting 10 seconds")
-                sleep(10)
-                continue
-            if response.status_code not in (200, 429):
-                raise Exception(f"Error getting operation state: {response.status_code}, {response.text}")
-            break
-
-        return json.loads(response.text)
+        return self.core_client.get_operation_state(operation_id=self.operation_id)
 
     def wait_for_completion(self):
         """Wait for the operation to complete"""
-        max_iter = 20
         start_time = time()
         while self.state not in ('Succeeded', 'Failed'):
            self.state = self.get_operation_state()["status"]
@@ -63,7 +33,7 @@ class LongRunningOperation:
         return self.state
 
 
-def check_long_running_operation(headers, auth):
+def check_long_running_operation(headers, core_client):
    """Check the status of a long running operation"""
    location = headers.get('Location', None)
    operation_id = headers.get('x-ms-operation-id', None)
@@ -73,7 +43,7 @@ def check_long_running_operation(headers, auth):
    if not operation_id:
        print("Operation initiated, no operation id found")
        return None
-    lro = LongRunningOperation(operation_id=operation_id, auth=auth)
+    lro = LongRunningOperation(operation_id=operation_id, core_client=core_client)
    lro.wait_for_completion()

    return lro.get_operation_results()
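The pattern above repeats across the whole release: hand-rolled requests calls, 429 retry loops, and JSON parsing move out of the helper classes and into FabricClientCore, which now owns all HTTP concerns. A minimal sketch of the new call path, assuming credentials are resolved by the client's default auth flow; the operation id is a placeholder:

    from msfabricpysdkcore.coreapi import FabricClientCore
    from msfabricpysdkcore.long_running_operation import LongRunningOperation

    core_client = FabricClientCore(silent=True)  # silent=True as used elsewhere in this diff

    # placeholder id; in practice it comes from the x-ms-operation-id response header
    lro = LongRunningOperation(operation_id="<operation-id>", core_client=core_client)
    lro.wait_for_completion()             # polls get_operation_state() until Succeeded/Failed
    results = lro.get_operation_results() # now a single delegation to the core client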
msfabricpysdkcore/onelakeshortcut.py
@@ -1,12 +1,11 @@
 import json
-import requests
-from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
 
 class OneLakeShortcut:
     """Class to represent a onelake shortcut in Microsoft Fabric"""
 
     def __init__(self, name, path, workspace_id, item_id, target,
-                 auth) -> None:
+                 core_client: FabricClientCore) -> None:
 
         self.name = name
         self.path = path
@@ -14,7 +13,7 @@ class OneLakeShortcut:
         self.item_id = item_id
         self.workspace_id = workspace_id
 
-        self.user_auth = auth
+        self.core_client = core_client
 
     def __str__(self) -> str:
         """Return a string representation of the workspace object"""
@@ -30,29 +29,16 @@ class OneLakeShortcut:
     def __repr__(self) -> str:
         return self.__str__()
 
-    def from_dict(short_dict, auth):
+    def from_dict(short_dict, core_client):
         """Create OneLakeShortCut object from dictionary"""
         return OneLakeShortcut(name=short_dict['name'],
                                path=short_dict['path'],
                                target=short_dict['target'],
                                item_id=short_dict['itemId'],
                                workspace_id=short_dict['workspaceId'],
-                               auth=auth)
+                               core_client=core_client)
 
     def delete(self):
         """Delete the shortcut"""
-
-        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/items/{self.item_id}/shortcuts/{self.path}/{self.name}"
-        for _ in range(10):
-            response = requests.delete(url=url, headers=self.user_auth.get_headers())
-            if response.status_code == 429:
-                print("Too many requests, waiting 10 seconds")
-                sleep(10)
-                continue
-            if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                raise Exception(f"Error deleting shortcut: {response.text}")
-            break
-
-        return response.status_code
+        return self.core_client.delete_shortcut(workspace_id=self.workspace_id, item_id=self.item_id, path=self.path,
+                                                name=self.name)
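OneLakeShortcut follows suit: delete() is now a one-line delegation to FabricClientCore.delete_shortcut. A sketch with placeholder ids and an abbreviated target payload:

    from msfabricpysdkcore.coreapi import FabricClientCore
    from msfabricpysdkcore.onelakeshortcut import OneLakeShortcut

    core_client = FabricClientCore(silent=True)

    shortcut = OneLakeShortcut.from_dict(
        {"name": "myshortcut", "path": "Tables",
         "target": {},  # target payload omitted for brevity
         "itemId": "<item-id>", "workspaceId": "<workspace-id>"},
        core_client=core_client)

    status_code = shortcut.delete()  # forwards to core_client.delete_shortcut(...)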
msfabricpysdkcore/otheritems.py
@@ -1,19 +1,15 @@
-import json
-from time import sleep
-import requests
 from msfabricpysdkcore.item import Item
-from msfabricpysdkcore.long_running_operation import check_long_running_operation
 
 
 class Eventhouse(Item):
     """Class to represent a eventhouse in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, description=""):
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, description=""):
         super().__init__(id = id, display_name=display_name, type=type,
-                         workspace_id=workspace_id, auth=auth, properties=properties,
+                         workspace_id=workspace_id, core_client=core_client, properties=properties,
                          description=description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         if "displayName" not in item_dict:
             item_dict["displayName"] = item_dict["display_name"]
         if "workspaceId" not in item_dict:
@@ -22,7 +18,7 @@ class Eventhouse(Item):
         return Eventhouse(id=item_dict['id'], display_name=item_dict['displayName'],
                           type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                           properties=item_dict.get('properties', None),
-                          description=item_dict.get('description', ""), auth=auth)
+                          description=item_dict.get('description', ""), core_client=core_client)
 
     def create_kql_database(self, display_name = None, description= None):
         from msfabricpysdkcore.coreapi import FabricClientCore
@@ -30,191 +26,170 @@ class Eventhouse(Item):
         creation_payload = {"databaseType" : "ReadWrite",
                             "parentEventhouseItemId" : self.id}
 
-        fcc = FabricClientCore(silent=True)
-
-        return fcc.create_kql_database(workspace_id = self.workspace_id,
-                                       display_name = display_name, description = description,
-                                       creation_payload= creation_payload)
+        return self.core_client.create_kql_database(self.workspace_id, display_name=display_name, description=description, creation_payload=creation_payload)
 
 class SparkJobDefinition(Item):
     """Class to represent a spark job definition in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return SparkJobDefinition(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                                   properties=item_dict.get('properties', None),
-                                  definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                                  definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
     def get_definition(self, format=None):
-        return super().get_definition(type="sparkJobDefinitions", format=format)
+        resp_dict = self.core_client.get_spark_job_definition_definition(self.workspace_id, self.id, format=format)
+        self.definition = resp_dict['definition']
+        return resp_dict
 
     def update_definition(self, definition):
-        return super().update_definition(definition=definition, type="sparkJobDefinitions")
+        return self.core_client.update_spark_job_definition_definition(self.workspace_id, self.id, definition)
 
     def run_on_demand_spark_job_definition(self, job_type = "sparkjob"):
-        # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/sparkJobDefinitions/{sparkJobDefinitionId}/jobs/instances?jobType={jobType}
-        """Method to run a spark job definition on demand"""
-        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/sparkJobDefinitions/{self.id}/jobs/instances?jobType={job_type}"
-
-        for _ in range(10):
-            response = requests.post(url=url, headers=self.auth.get_headers())
-            if response.status_code == 429:
-                print("Too many requests, waiting 10 seconds")
-                sleep(10)
-                continue
-            if response.status_code == 202:
-                location = response.headers['Location']
-                job_instance_id = location.split('/')[-1]
-
-                from msfabricpysdkcore import FabricClientCore
-                fc = FabricClientCore(silent=True)
-                fc.auth = self.auth
-                return fc.get_item_job_instance(workspace_id = self.workspace_id,
-                                                item_id = self.id,
-                                                job_instance_id = job_instance_id)
-            if response.status_code not in (200, 201, 202, 429):
-                raise Exception(f"Error running on demand spark job definition: {response.status_code}, {response.text}")
-            break
-
-        return response
+        return self.core_client.run_on_demand_spark_job_definition(workspace_id=self.workspace_id, spark_job_definition_id=self.id, job_type=job_type)
 
 
 class Warehouse(Item):
     """Class to represent a warehouse in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return Warehouse(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                          properties=item_dict.get('properties', None),
-                         definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                         definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
 class KQLDatabase(Item):
     """Class to represent a kql database in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return KQLDatabase(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                            properties=item_dict.get('properties', None),
-                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
 class KQLQueryset(Item):
     """Class to represent a kql database in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return KQLQueryset(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                            properties=item_dict.get('properties', None),
-                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
 class Eventstream(Item):
     """Class to represent a eventstream in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return Eventstream(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                            properties=item_dict.get('properties', None),
-                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
 class MLExperiment(Item):
     """Class to represent a ml experiment in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return MLExperiment(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                             properties=item_dict.get('properties', None),
-                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
 class MLModel(Item):
     """Class to represent a ml model in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return MLModel(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                        properties=item_dict.get('properties', None),
-                       definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                       definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
 class Notebook(Item):
     """Class to represent a notebook in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return Notebook(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                         properties=item_dict.get('properties', None),
-                        definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                        definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
     def get_definition(self, format=None):
         """Method to get the definition of the notebook"""
-        return super().get_definition(type = "notebooks", format = format)
+        definition = self.core_client.get_item_definition(self.workspace_id, self.id, type="notebooks", format=format)
+        self.definition = definition
+        return definition
 
     def update_definition(self, definition):
         """Method to update the definition of the notebook"""
-        return super().update_definition(definition, type = "notebooks")
+        return self.core_client.update_item_definition(self.workspace_id, self.id, definition, type="notebooks")
 
 class Report(Item):
     """Class to represent a report in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return Report(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                       properties=item_dict.get('properties', None),
-                      definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                      definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
     def get_definition(self, type=None, format=None):
         """Method to get the definition of the report"""
-        return super().get_definition(type = "reports", format = format)
+        self.definition = self.core_client.get_item_definition(self.workspace_id, self.id, type="reports", format=format)
+        return self.definition
 
     def update_definition(self, definition):
         """Method to update the definition of the report"""
-        return super().update_definition(definition, type = "reports")
+        return self.core_client.update_item_definition(self.workspace_id, self.id, definition, type="reports")
 
 class SemanticModel(Item):
     """Class to represent a semantic model in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return SemanticModel(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                              properties=item_dict.get('properties', None),
-                             definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                             definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
     def get_definition(self, format=None):
         """Method to get the definition of the semantic model"""
-        return super().get_definition(type="semanticModels", format=format)
+        self.definition = self.core_client.get_item_definition(self.workspace_id, self.id, type="semanticModels", format=format)
+        return self.definition
 
     def update_definition(self, definition):
         """Method to update the definition of the semantic model"""
-        return super().update_definition(definition, type = "semanticModels")
+        return self.core_client.update_item_definition(self.workspace_id, self.id, definition, type="semanticModels")
 
 class DataPipeline(Item):
     """Class to represent a spark job definition in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
         return DataPipeline(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                             properties=item_dict.get('properties', None),
-                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), auth=auth)
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
     def run_on_demand_item_job(self, execution_data=None):
-        return super().run_on_demand_item_job(job_type = "Pipeline", execution_data=execution_data)
+        return self.core_client.run_on_demand_item_job(workspace_id=self.workspace_id, item_id=self.id, job_type="Pipeline", execution_data=execution_data)
+
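Every item class in otheritems.py is rewritten to the same template: from_dict takes a core_client instead of an auth object, and the definition helpers delegate to the client's generic get_item_definition/update_item_definition with the matching type string. A sketch using Notebook, with placeholder ids:

    from msfabricpysdkcore.coreapi import FabricClientCore
    from msfabricpysdkcore.otheritems import Notebook

    core_client = FabricClientCore(silent=True)

    nb = Notebook.from_dict(
        {"id": "<notebook-id>", "displayName": "nb1", "type": "Notebook",
         "workspaceId": "<workspace-id>"},
        core_client=core_client)

    definition = nb.get_definition()  # core_client.get_item_definition(..., type="notebooks")
    nb.update_definition(definition)  # core_client.update_item_definition(..., type="notebooks")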
msfabricpysdkcore/spark_custom_pool.py
@@ -1,13 +1,11 @@
 import json
-from time import sleep
-
-import requests
+from msfabricpysdkcore.coreapi import FabricClientCore
 
 
 class SparkCustomPool:
     """Class to represent a custom pool in Microsoft Fabric"""
 
-    def __init__(self, id, name, type, node_family, node_size, auto_scale, dynamic_executor_allocation, workspace_id, auth) -> None:
+    def __init__(self, id, name, type, node_family, node_size, auto_scale, dynamic_executor_allocation, workspace_id, core_client: FabricClientCore) -> None:
 
         self.id = id
         self.name = name
@@ -18,7 +16,7 @@ class SparkCustomPool:
         self.dynamic_executor_allocation = dynamic_executor_allocation
         self.workspace_id = workspace_id
 
-        self.auth = auth
+        self.core_client = core_client
 
     def __str__(self) -> str:
         """Return a string representation of the workspace object"""
@@ -37,7 +35,7 @@ class SparkCustomPool:
     def __repr__(self) -> str:
         return self.__str__()
 
-    def from_dict(item_dict, auth):
+    def from_dict(item_dict, core_client):
        """Create Item object from dictionary"""

        if 'autoScale' not in item_dict:
@@ -54,55 +52,17 @@ class SparkCustomPool:
 
         return SparkCustomPool(id=item_dict['id'], name=item_dict['name'], type=item_dict['type'], node_family=item_dict['nodeFamily'],
                                node_size=item_dict['nodeSize'], auto_scale=item_dict['autoScale'], dynamic_executor_allocation=item_dict['dynamicExecutorAllocation'],
-                               workspace_id=item_dict['workspaceId'], auth=auth)
+                               workspace_id=item_dict['workspaceId'], core_client=core_client)
 
 
     def delete(self):
         """Delete the custom pool item"""
-        # DELETE http://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/spark/pools/{poolId}
-
-        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/spark/pools/{self.id}"
-        for _ in range(10):
-            response = requests.delete(url=url, headers=self.auth.get_headers())
-            if response.status_code == 429:
-                print("Too many requests, waiting 10 seconds")
-                sleep(10)
-                continue
-            if response.status_code not in (200, 429):
-                raise Exception(f"Error deleting spark pool: {response.status_code}, {response.text}")
-            break
-
-        return response.status_code
+        return self.core_client.delete_workspace_custom_pool(self.workspace_id, self.id)
 
 
     def update(self, name, node_family, node_size, auto_scale, dynamic_executor_allocation):
         """Update the custom pool item"""
-        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/spark/pools/{self.id}"
-        body = {}
-
-        if name is not None:
-            body['name'] = name
-        if node_family is not None:
-            body['nodeFamily'] = node_family
-        if node_size is not None:
-            body['nodeSize'] = node_size
-        if auto_scale is not None:
-            body['autoScale'] = auto_scale
-        if dynamic_executor_allocation is not None:
-            body['dynamicExecutorAllocation'] = dynamic_executor_allocation
-
-        if not body:
-            return self
-        for _ in range(10):
-            response = requests.patch(url=url, headers=self.auth.get_headers(), json=body)
-            if response.status_code == 429:
-                print("Too many requests, waiting 10 seconds")
-                sleep(10)
-                continue
-            if response.status_code not in (200, 429):
-                raise Exception(f"Error updating item: {response.status_code}, {response.text}")
-            break
-
+        _ = self.core_client.update_workspace_custom_pool(self.workspace_id, self.id, name, node_family, node_size, auto_scale, dynamic_executor_allocation)
         if name is not None:
             self.name = name
         if node_family is not None:
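SparkCustomPool.update() keeps its local-attribute syncing but now pushes the PATCH itself through the client. A sketch; the ids and the autoScale/dynamicExecutorAllocation payloads are placeholders:

    from msfabricpysdkcore.coreapi import FabricClientCore
    from msfabricpysdkcore.spark_custom_pool import SparkCustomPool

    core_client = FabricClientCore(silent=True)

    pool = SparkCustomPool.from_dict(
        {"id": "<pool-id>", "name": "pool1", "type": "Workspace",
         "nodeFamily": "MemoryOptimized", "nodeSize": "Small",
         "autoScale": {},                   # placeholder payload shapes
         "dynamicExecutorAllocation": {},
         "workspaceId": "<workspace-id>"},
        core_client=core_client)

    # one call through the client, then the local attributes are updated in place
    pool.update(name="pool1_renamed", node_family=None, node_size=None,
                auto_scale=None, dynamic_executor_allocation=None)
    pool.delete()  # core_client.delete_workspace_custom_pool(workspace_id, pool_id)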
msfabricpysdkcore/tests/test_admin_apis.py
@@ -13,19 +13,19 @@ class TestFabricClientCore(unittest.TestCase):
     def test_admin_api(self):
         fca = self.fca
 
-        user_id = 'b4f4e299-e6e1-4667-886c-57e4a8dde1c2'
+        user_id = '1dc64c6e-7a10-4ea9-8488-85d0739a377d'
 
         # List workspaces
-        ws = fca.list_workspaces(name="testworkspace")[0]
+        ws = fca.list_workspaces(name="testitems")[0]
 
-        self.assertEqual(ws.name, "testworkspace")
+        self.assertEqual(ws.name, "testitems")
 
         # Get workspace
         ws_clone = fca.get_workspace(workspace_id=ws.id)
 
         self.assertEqual(ws.id, ws_clone.id)
 
-        # Get workspace access details
+        # List workspace access details
 
         ws_access = fca.list_workspace_access_details(ws.id)
         principials = ws_access["accessDetails"]
@@ -71,8 +71,8 @@ class TestFabricClientCore(unittest.TestCase):
 
         fca = self.fca
 
-        items = [{"id": "d417b834-d381-454c-9cf0-c491f69508de", "type": "Lakehouse"}]
-        label_id = "defa4170-0d19-0005-000a-bc88714345d2"
+        items = [{"id": "9cdd3192-bcd0-4cbe-b945-29f5964e7ab7", "type": "Lakehouse"}]
+        label_id = "defa4170-0d19-0005-0007-bc88714345d2"
         resp = fca.bulk_set_labels(items=items, label_id=label_id)
         self.assertEqual(resp["itemsChangeLabelStatus"][0]["status"], "Succeeded")
         resp = fca.bulk_remove_labels(items=items)
@@ -83,17 +83,16 @@ class TestFabricClientCore(unittest.TestCase):
         fca = self.fca
 
         data_shares = fca.list_external_data_shares()
-        ws = fca.list_workspaces(name="testworkspace")[0]
+        ws_id = "63aa9e13-4912-4abe-9156-8a56e565b7a3"
 
-        data_shares = [d for d in data_shares if d['workspaceId'] == ws.id]
+        data_shares = [d for d in data_shares if d['workspaceId'] == ws_id]
 
         self.assertGreater(len(data_shares), 0)
         fca.revoke_external_data_share(external_data_share_id = data_shares[0]['id'],
                                        item_id = data_shares[0]['itemId'],
                                        workspace_id = data_shares[0]['workspaceId'])
         data_shares = fca.list_external_data_shares()
-        ws = fca.list_workspaces(name="testworkspace")[0]
 
-        data_shares = [d for d in data_shares if d['workspaceId'] == ws.id]
+        data_shares = [d for d in data_shares if d['workspaceId'] == ws_id]
 
         self.assertEqual(data_shares[0]['status'], 'Revoked')
msfabricpysdkcore/tests/test_datapipelines.py
@@ -16,33 +16,30 @@ class TestFabricClientCore(unittest.TestCase):
     def test_data_pipelines(self):
 
         fc = self.fc
-        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
-
+        workspace_id = '63aa9e13-4912-4abe-9156-8a56e565b7a3'
+        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        pipeline_name = f"pipeline_{datetime_str}"
+
+        dp = fc.create_data_pipeline(workspace_id, display_name=pipeline_name, description="asda")
+        dp.update_definition(dp.definition)
+
         dps = fc.list_data_pipelines(workspace_id)
         dp_names = [dp.display_name for dp in dps]
         self.assertGreater(len(dps), 0)
-        self.assertIn("pipeline1", dp_names)
-
-        dp = fc.get_data_pipeline(workspace_id, data_pipeline_name="pipeline1")
-        self.assertIsNotNone(dp.id)
-        self.assertIsNotNone(dp.definition)
-        self.assertEqual(dp.display_name, "pipeline1")
-
-        dp_new = fc.create_data_pipeline(workspace_id, display_name="pipeline_new", description="asda")
-        dp_new.update_definition(dp.definition)
-
-        self.assertEqual(dp_new.display_name, "pipeline_new")
+        self.assertIn(pipeline_name, dp_names)
 
-        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2")
+        self.assertEqual(dp.display_name, pipeline_name)
+        pipeline_name2 = f"pipeline_{datetime_str}_2"
+        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name=pipeline_name2, return_item=True)
 
         dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
-        self.assertEqual(dp.display_name, "pipeline2")
+        self.assertEqual(dp.display_name, pipeline_name2)
         self.assertEqual(dp.id, dp2.id)
 
-        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline1")
+        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name=pipeline_name, return_item=True)
 
         dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
-        self.assertEqual(dp.display_name, "pipeline1")
+        self.assertEqual(dp.display_name, pipeline_name)
         self.assertEqual(dp.id, dp2.id)
-        status_code = fc.delete_data_pipeline(workspace_id, dp_new.id)
+        status_code = fc.delete_data_pipeline(workspace_id, dp.id)
         self.assertEqual(status_code, 200)
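Beyond creating its own uniquely named pipeline instead of relying on a pre-existing "pipeline1", the reworked test exercises the new return_item flag on update_data_pipeline; judging from the diff, the flag makes the client return the updated item object rather than the raw response. A sketch with a placeholder pipeline id:

    pipeline = fc.update_data_pipeline(workspace_id, "<pipeline-id>",
                                       display_name="pipeline_renamed", return_item=True)
    assert pipeline.display_name == "pipeline_renamed"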