msfabricpysdkcore 0.2.2__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- msfabricpysdkcore/adminapi.py +114 -0
- msfabricpysdkcore/coreapi.py +1216 -53
- msfabricpysdkcore/deployment_pipeline.py +101 -28
- msfabricpysdkcore/domain.py +4 -1
- msfabricpysdkcore/eventstream.py +68 -0
- msfabricpysdkcore/folder.py +69 -0
- msfabricpysdkcore/item.py +9 -0
- msfabricpysdkcore/lakehouse.py +9 -1
- msfabricpysdkcore/otheritems.py +59 -40
- msfabricpysdkcore/tests/test_admin_tags.py +46 -0
- msfabricpysdkcore/tests/test_copy_jobs.py +60 -0
- msfabricpysdkcore/tests/test_dataflows.py +60 -0
- msfabricpysdkcore/tests/test_datapipelines.py +44 -29
- msfabricpysdkcore/tests/test_deployment_pipelinev2.py +135 -0
- msfabricpysdkcore/tests/test_eventstream_topology.py +82 -0
- msfabricpysdkcore/tests/test_folders.py +53 -0
- msfabricpysdkcore/tests/test_tags.py +28 -0
- msfabricpysdkcore/tests/test_variable_libary.py +61 -0
- msfabricpysdkcore/workspace.py +354 -0
- {msfabricpysdkcore-0.2.2.dist-info → msfabricpysdkcore-0.2.4.dist-info}/METADATA +226 -24
- {msfabricpysdkcore-0.2.2.dist-info → msfabricpysdkcore-0.2.4.dist-info}/RECORD +24 -14
- {msfabricpysdkcore-0.2.2.dist-info → msfabricpysdkcore-0.2.4.dist-info}/WHEEL +1 -1
- {msfabricpysdkcore-0.2.2.dist-info → msfabricpysdkcore-0.2.4.dist-info/licenses}/LICENSE +0 -0
- {msfabricpysdkcore-0.2.2.dist-info → msfabricpysdkcore-0.2.4.dist-info}/top_level.txt +0 -0
@@ -6,10 +6,11 @@ from msfabricpysdkcore.coreapi import FabricClientCore
 class DeploymentPipeline:
     """Class to represent a deployment pipeline in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, description, core_client: FabricClientCore) -> None:
+    def __init__(self, id, display_name, description, stages, core_client: FabricClientCore) -> None:
         self.id = id
         self.display_name = display_name
         self.description = description
+        self.stages = stages
         self.core_client = core_client
 
 
@@ -18,56 +19,108 @@ class DeploymentPipeline:
         if dict["displayName"] == None:
             dict["displayName"] = dict["display_name"]
 
-
-        return DeploymentPipeline(
+        depl_pipe = DeploymentPipeline(
             id=dict["id"],
             display_name=dict["displayName"],
             description=dict["description"],
+            stages=[],
             core_client=core_client
         )
 
+        if "stages" in dict:
+            depl_pipe.stages = dict["stages"]
+        else:
+            dict["stages"] = depl_pipe.List_stages()
+
+        return depl_pipe
+
 
     def __str__(self) -> str:
         """Return a string representation of the workspace object"""
-        dict_ = {"id": self.id, "display_name": self.display_name, "description": self.description}
+        dict_ = {"id": self.id, "display_name": self.display_name, "description": self.description, "stages": self.stages}
 
         return json.dumps(dict_, indent=2)
 
     def __repr__(self) -> str:
         return self.__str__()
 
-    def
-
+    def add_role_assignment(self, principal, role):
+        """Add a role assignment to the deployment pipeline"""
+        return self.core_client.add_deployment_pipeline_role_assignment(deployment_pipeline_id=self.id, principal=principal, role=role)
+
+    def assign_workspace_to_stage(self, stage_id, workspace_id):
+        """Assign a workspace to a stage in the deployment pipeline"""
+        return self.core_client.assign_workspace_to_stage(deployment_pipeline_id=self.id, stage_id=stage_id, workspace_id=workspace_id)
+
+    def delete(self):
+        """Delete the deployment pipeline"""
+        return self.core_client.delete_deployment_pipeline(deployment_pipeline_id=self.id)
+
+    def delete_role_assignment(self, principal_id):
+        """Delete a role assignment from the deployment pipeline"""
+        return self.core_client.delete_deployment_pipeline_role_assignment(deployment_pipeline_id=self.id, principal_id=principal_id)
+
+    def deploy_stage_content(self, source_stage_id, target_stage_id, created_workspace_details = None,
+                             items = None, note = None, options = None, wait_for_completion = True):
+        """Deploy the content of a stage to another stage in the deployment pipeline"""
         return self.core_client.deploy_stage_content(deployment_pipeline_id=self.id, source_stage_id=source_stage_id,
                                                      target_stage_id=target_stage_id, created_workspace_details=created_workspace_details,
-                                                     items=items, note=note, wait_for_completion=wait_for_completion)
+                                                     items=items, note=note, options=options, wait_for_completion=wait_for_completion)
+
+    def deploy(self, source_stage_id, target_stage_id, created_workspace_details = None,
+               items = None, note = None, options = None, wait_for_completion = True):
+        return self.deploy_stage_content(deployment_pipelinesource_stage_id=source_stage_id,
+                                         target_stage_id=target_stage_id, created_workspace_details=created_workspace_details,
+                                         items=items, note=note, options=options, wait_for_completion=wait_for_completion)
 
-    def
-
+    def get(self):
+        """Get the deployment pipeline"""
+        return self.core_client.get_deployment_pipeline(deployment_pipeline_id=self.id)
 
+    def get_operation(self, operation_id):
+        """Get the deployment pipeline operation"""
+        return self.core_client.get_deployment_pipeline_operation(deployment_pipeline_id=self.id, operation_id=operation_id)
 
-    def
-
-        return self.
+    def get_stage(self, stage_id):
+        """Get the deployment pipeline stage"""
+        return self.core_client.get_deployment_pipeline_stage(deployment_pipeline_id=self.id, stage_id=stage_id)
+
+    def list_operations(self):
+        """List the deployment pipeline operations"""
+        return self.core_client.list_deployment_pipeline_operations(deployment_pipeline_id=self.id)
 
-    def
-
+    def list_role_assignments(self):
+        """List the role assignments of the deployment pipeline"""
+        return self.core_client.list_deployment_pipeline_role_assignments(deployment_pipeline_id=self.id)
 
+    def list_stage_items(self, stage_id = None, stage_name = None):
+        """List the items in the deployment pipeline stage"""
+        return self.core_client.list_deployment_pipeline_stage_items(deployment_pipeline_id=self.id, stage_id=stage_id, stage_name=stage_name)
 
-    def
-
-        return self.
+    def list_stages(self):
+        """List the stages in the deployment pipeline"""
+        return self.core_client.list_deployment_pipeline_stages(deployment_pipeline_id=self.id)
 
+    def unassign_workspace_from_stage(self, stage_id):
+        """Unassign a workspace from a stage in the deployment pipeline"""
+        return self.core_client.unassign_workspace_from_stage(deployment_pipeline_id=self.id, stage_id=stage_id)
+
+    def update(self, display_name = None, description = None):
+        """Update the deployment pipeline"""
+        return self.core_client.update_deployment_pipeline(deployment_pipeline_id=self.id, display_name=display_name, description=description)
 
-    def
-
+    def update_stage(self, stage_id, display_name, description = None, is_public = None):
+        """Update the deployment pipeline stage"""
+        return self.core_client.update_deployment_pipeline_stage(deployment_pipeline_id=self.id, stage_id=stage_id, display_name=display_name, description=description, is_public=is_public)
 
+
 
-class
+class DeploymentPipelineStage():
 
     """Class to represent a deployment pipeline stage in Microsoft Fabric"""
 
-    def __init__(self, id, order, display_name, description, workspace_id, workspace_name, is_public,
+    def __init__(self, id, order, display_name, description, workspace_id, workspace_name, is_public,
+                 deployment_pipeline_id, core_client: FabricClientCore, items = None) -> None:
         self.id = id
         self.order = order
         self.display_name = display_name
@@ -75,6 +128,7 @@ class Deployment_Pipeline_Stage():
         self.workspace_id = workspace_id
         self.workspace_name = workspace_name
         self.is_public = is_public
+        self.items = items
         self.deployment_pipeline_id = deployment_pipeline_id
         self.core_client = core_client
 
@@ -97,13 +151,14 @@ class Deployment_Pipeline_Stage():
             dict["isPublic"] = dict["is_public"]
 
 
-        return
+        return DeploymentPipelineStage(id=dict["id"],
                                        order=dict["order"],
                                        display_name=dict["displayName"],
                                        description=dict["description"],
                                        workspace_id=dict["workspaceId"],
                                        workspace_name=dict["workspaceName"],
                                        is_public=dict["isPublic"],
+                                       items = dict.get("items", None),
                                        deployment_pipeline_id=dict["deploymentPipelineId"],
                                        core_client=core_client
         )
@@ -121,10 +176,28 @@ class Deployment_Pipeline_Stage():
     def __repr__(self) -> str:
         return self.__str__()
 
-    def
-
-
-
+    def assign_workspace_to_stage(self, workspace_id):
+        """Assign a workspace to the stage"""
+        resp = self.core_client.assign_workspace_to_stage(deployment_pipeline_id=self.deployment_pipeline_id, stage_id=self.id, workspace_id=workspace_id)
+        self.workspace_id = workspace_id
+        ws = self.core_client.get_workspace(workspace_id=workspace_id)
+        self.workspace_name = ws.display_name
+        return resp
+
+    def get(self):
+        """Get the deployment pipeline stage"""
+        return self.core_client.get_deployment_pipeline_stage(deployment_pipeline_id=self.deployment_pipeline_id, stage_id=self.id)
+
     def list_items(self):
-        return self.core_client.
-
+        return self.core_client.list_deployment_pipeline_stage_items(deployment_pipeline_id=self.deployment_pipeline_id, stage_id=self.id)
+
+    def unassign_workspacee(self):
+        """Unassign the workspace from the stage"""
+        resp = self.core_client.unassign_workspace_from_stage(deployment_pipeline_id=self.deployment_pipeline_id, stage_id=self.id)
+        self.workspace_id = None
+        self.workspace_name = None
+        return resp
+
+    def update(self, display_name = None, description = None, is_public = None):
+        """Update the deployment pipeline stage"""
+        return self.core_client.update_deployment_pipeline_stage(deployment_pipeline_id=self.deployment_pipeline_id, stage_id=self.id, display_name=display_name, description=description, is_public=is_public)
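The net effect is that a `DeploymentPipeline` object now carries its stages and exposes the role-assignment, stage, and deployment calls directly. A minimal usage sketch follows; the pipeline, stage, and workspace IDs are placeholders, and the dict shape of the entries returned by `list_stages()` is an assumption. The new `deploy()` alias forwards its source stage under the keyword `deployment_pipelinesource_stage_id`, so `deploy_stage_content()` is used here instead.

```python
from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()

# "<pipeline-id>" and "<workspace-id>" are placeholders for illustration only.
pipeline = fcc.get_deployment_pipeline(deployment_pipeline_id="<pipeline-id>")

# Stages are now part of the pipeline object; list_stages() delegates to the core client.
stages = pipeline.list_stages()
dev_stage_id = stages[0]["id"]    # assumed: dict-shaped stage entries
test_stage_id = stages[1]["id"]

pipeline.assign_workspace_to_stage(stage_id=dev_stage_id, workspace_id="<workspace-id>")

# deploy_stage_content() gained an `options` parameter in this release.
pipeline.deploy_stage_content(source_stage_id=dev_stage_id, target_stage_id=test_stage_id,
                              note="promote to test", wait_for_completion=True)
```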
msfabricpysdkcore/domain.py
CHANGED
@@ -54,7 +54,10 @@ class Domain:
         if "display_name" not in dic:
             dic["display_name"] = dic["displayName"]
         if "parent_domain_id" not in dic:
-
+            if "parentDomainId" in dic:
+                dic["parent_domain_id"] = dic["parentDomainId"]
+            else:
+                dic["parent_domain_id"] = None
         if "contributors_scope" not in dic:
             dic["contributors_scope"] = dic["contributorsScope"]
         return Domain(id=dic['id'], display_name=dic['display_name'],
msfabricpysdkcore/eventstream.py
ADDED
@@ -0,0 +1,68 @@
+from msfabricpysdkcore.item import Item
+
+class Eventstream(Item):
+    """Class to represent a eventstream in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties=None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return Eventstream(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                           properties=item_dict.get('properties', None),
+                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
+
+    def get_definition(self, type=None, format=None):
+        """Method to get the definition of the eventstream"""
+        return super().get_definition(type="eventstreams", format=format)
+
+    def update_definition(self, definition):
+        """Method to update the definition of the eventstream"""
+        return self.core_client.update_item_definition(self.workspace_id, self.id, definition, type="eventstreams")
+
+    # eventstream topology
+    def get_eventstream_destination(self, destination_id):
+        """Get the destination of an eventstream in a workspace"""
+        return self.core_client.get_eventstream_destination(workspace_id=self.workspace_id, eventstream_id=self.id, destination_id=destination_id)
+
+    def get_eventstream_destination_connection(self, destination_id):
+        """Get the connection of a destination in an eventstream in a workspace"""
+        return self.core_client.get_eventstream_destination_connection(workspace_id=self.workspace_id, eventstream_id=self.id, destination_id=destination_id)
+
+    def get_eventstream_source(self, source_id):
+        """Get the source of an eventstream in a workspace"""
+        return self.core_client.get_eventstream_source(workspace_id=self.workspace_id, eventstream_id=self.id, source_id=source_id)
+
+    def get_eventstream_source_connection(self, source_id):
+        """Get the connection of a source in an eventstream in a workspace"""
+        return self.core_client.get_eventstream_source_connection(workspace_id=self.workspace_id, eventstream_id=self.id, source_id=source_id)
+
+    def get_eventstream_topology(self):
+        """Get the topology of an eventstream in a workspace"""
+        return self.core_client.get_eventstream_topology(workspace_id=self.workspace_id, eventstream_id=self.id)
+
+    def pause_eventstream(self):
+        """Pause an eventstream in a workspace"""
+        return self.core_client.pause_eventstream(workspace_id=self.workspace_id, eventstream_id=self.id)
+
+    def pause_eventstream_destination(self, destination_id):
+        """Pause a destination in an eventstream in a workspace"""
+        return self.core_client.pause_eventstream_destination(workspace_id=self.workspace_id, eventstream_id=self.id, destination_id=destination_id)
+
+    def pause_eventstream_source(self, source_id):
+        """Pause a source in an eventstream in a workspace"""
+        return self.core_client.pause_eventstream_source(workspace_id=self.workspace_id, eventstream_id=self.id, source_id=source_id)
+
+    def resume_eventstream(self, start_type, custom_start_date_time = None):
+        """Resume an eventstream in a workspace"""
+        return self.core_client.resume_eventstream(workspace_id=self.workspace_id, eventstream_id=self.id, start_type=start_type, custom_start_date_time=custom_start_date_time)
+
+    def resume_eventstream_destination(self, destination_id, start_type, custom_start_date_time = None):
+        """Resume a destination in an eventstream in a workspace"""
+        return self.core_client.resume_eventstream_destination(workspace_id=self.workspace_id, eventstream_id=self.id,
+                                                               destination_id=destination_id, start_type=start_type, custom_start_date_time=custom_start_date_time)
+
+    def resume_eventstream_source(self, source_id, start_type, custom_start_date_time = None):
+        """Resume a source in an eventstream in a workspace"""
+        return self.core_client.resume_eventstream_source(workspace_id=self.workspace_id, eventstream_id=self.id,
+                                                          source_id=source_id, start_type=start_type, custom_start_date_time=custom_start_date_time)
+
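The new Eventstream wrapper is a thin pass-through to the matching FabricClientCore calls, so the topology and pause/resume operations can also be driven straight from the client. A small sketch with placeholder IDs; the `start_type` value is an assumption, so check the Fabric REST documentation for the accepted start types.

```python
from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()

# Placeholder IDs for illustration only.
ws_id, es_id = "<workspace-id>", "<eventstream-id>"

# Inspect the sources and destinations of the eventstream.
topology = fcc.get_eventstream_topology(workspace_id=ws_id, eventstream_id=es_id)
print(topology)

# Pause the whole eventstream, then resume it.
fcc.pause_eventstream(workspace_id=ws_id, eventstream_id=es_id)
fcc.resume_eventstream(workspace_id=ws_id, eventstream_id=es_id, start_type="Now")  # assumed start type
```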
msfabricpysdkcore/folder.py
ADDED
@@ -0,0 +1,69 @@
+import json
+
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+class Folder:
+    """Class to represent a folder in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, workspace_id, core_client: FabricClientCore, parent_folder_id) -> None:
+
+        self.id = id
+        self.display_name = display_name
+        self.parent_folder_id = parent_folder_id
+        self.workspace_id = workspace_id
+
+        self.core_client = core_client
+
+    def __str__(self) -> str:
+        """Return a string representation of the workspace object"""
+        dict_ = {
+            'id': self.id,
+            'display_name': self.display_name,
+            'parent_folder_id': self.parent_folder_id,
+            'workspace_id': self.workspace_id,
+        }
+        return json.dumps(dict_, indent=2)
+
+    def __repr__(self) -> str:
+        return self.__str__()
+
+    def from_dict(folder_dict, core_client):
+        """Create Folder object from dictionary"""
+
+        return Folder(id=folder_dict['id'], display_name=folder_dict['displayName'], workspace_id=folder_dict['workspaceId'],
+                      parent_folder_id=folder_dict.get('parentFolderId', ""), core_client=core_client)
+
+    def delete(self):
+        """Delete the folder"""
+        return self.core_client.delete_folder(workspace_id=self.workspace_id, folder_id=self.id)
+
+    def get(self):
+        """Get the folder"""
+        returned_folder = self.core_client.get_folder(workspace_id=self.workspace_id, folder_id=self.id)
+        self.display_name = returned_folder.display_name
+        self.parent_folder_id = returned_folder.parent_folder_id
+        self.workspace_id = returned_folder.workspace_id
+        return returned_folder
+
+    def move(self, target_folder_id = None):
+        """Move a folder
+        Args:
+            target_folder_id (str): The ID of the target folder
+        Returns:
+            dict: The moved folder
+        """
+        moved_folder = self.core_client.move_folder(folder_id=self.id, target_folder_id=target_folder_id)
+        self.id = moved_folder.id
+        self.display_name = moved_folder.display_name
+        self.parent_folder_id = moved_folder.parent_folder_id
+        self.workspace_id = moved_folder.workspace_id
+
+        return moved_folder
+
+    def update(self, display_name = None):
+        """Update the folder"""
+        updated_folder = self.core_client.update_folder(workspace_id=self.workspace_id, folder_id=self.id, display_name=display_name)
+        self.display_name = updated_folder.display_name
+        self.parent_folder_id = updated_folder.parent_folder_id
+        self.workspace_id = updated_folder.workspace_id
+        return updated_folder
msfabricpysdkcore/item.py
CHANGED
@@ -79,6 +79,15 @@ class Item:
         self.definition = definition
         return response
 
+    # Tags
+    def apply_tags(self, tags):
+        """Apply tags to an item in a workspace"""
+        return self.core_client.apply_tags(workspace_id=self.workspace_id, item_id=self.id, tags=tags)
+
+    def unapply_tags(self, tags):
+        """Unapply tags from an item in a workspace"""
+        return self.core_client.unapply_tags(workspace_id=self.workspace_id, item_id=self.id, tags=tags)
+
     # Shortcut
 
     def create_shortcut(self, path, name, target):
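The new tag helpers on `Item` forward to `apply_tags`/`unapply_tags` on the core client. A hedged sketch; whether `tags` takes tag IDs or tag objects is not visible in this diff, so the list contents below are an assumption.

```python
from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()

# Placeholder IDs; get_item is part of the existing core API surface.
item = fcc.get_item(workspace_id="<workspace-id>", item_id="<item-id>")

item.apply_tags(tags=["<tag-id>"])     # assumed: a list of tag IDs
item.unapply_tags(tags=["<tag-id>"])
```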
msfabricpysdkcore/lakehouse.py
CHANGED
@@ -24,4 +24,12 @@ class Lakehouse(Item):
 
     def run_on_demand_table_maintenance(self, execution_data, job_type = "TableMaintenance", wait_for_completion = True):
         """Run on demand table maintenance"""
-        return self.core_client.run_on_demand_table_maintenance(self.workspace_id, self.id, execution_data, job_type, wait_for_completion)
+        return self.core_client.run_on_demand_table_maintenance(self.workspace_id, self.id, execution_data, job_type, wait_for_completion)
+
+    def list_livy_sessions(self):
+        """List all livy sessions in the lakehouse"""
+        return self.core_client.list_lakehouse_livy_sessions(self.workspace_id, self.id)
+
+    def get_livy_session(self, livy_id):
+        """Get a livy session in the lakehouse"""
+        return self.core_client.get_lakehouse_livy_session(self.workspace_id, self.id, livy_id)
msfabricpysdkcore/otheritems.py
CHANGED
@@ -1,6 +1,42 @@
 from msfabricpysdkcore.item import Item
 
 
+class CopyJob(Item):
+    """Class to represent a copy job in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return CopyJob(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                       properties=item_dict.get('properties', None),
+                       definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
+
+class Dataflow(Item):
+    """Class to represent a dataflow in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return Dataflow(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                        properties=item_dict.get('properties', None),
+                        definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
+
+class DataPipeline(Item):
+    """Class to represent a spark job definition in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
+        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
+
+    def from_dict(item_dict, core_client):
+        return DataPipeline(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                            properties=item_dict.get('properties', None),
+                            definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
+
+    def run_on_demand_item_job(self, execution_data=None):
+        return self.core_client.run_on_demand_item_job(workspace_id=self.workspace_id, item_id=self.id, job_type="Pipeline", execution_data=execution_data)
+
 class Eventhouse(Item):
     """Class to represent a eventhouse in Microsoft Fabric"""
 
@@ -56,6 +92,13 @@ class SparkJobDefinition(Item):
     def run_on_demand_spark_job_definition(self, job_type = "sparkjob"):
         return self.core_client.run_on_demand_spark_job_definition(workspace_id=self.workspace_id, spark_job_definition_id=self.id, job_type=job_type)
 
+    def list_livy_sessions(self):
+        """List all livy sessions in the spark job definition"""
+        return self.core_client.list_spark_job_definition_livy_sessions(self.workspace_id, self.id)
+
+    def get_livy_session(self, livy_id):
+        """Get a livy session in the spark job definition"""
+        return self.core_client.get_spark_job_definition_livy_session(self.workspace_id, self.id, livy_id)
 
 class Warehouse(Item):
     """Class to represent a warehouse in Microsoft Fabric"""
@@ -126,31 +169,11 @@ class KQLQueryset(Item):
         return self.core_client.update_item_definition(self.workspace_id, self.id, definition, type="kqlQuerysets",
                                                        update_metadata=update_metadata)
 
-
-
-class Eventstream(Item):
-    """Class to represent a eventstream in Microsoft Fabric"""
-
-    def __init__(self, id, display_name, type, workspace_id, core_client, properties = None, definition=None, description=""):
-        super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
-
-    def from_dict(item_dict, core_client):
-        return Eventstream(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
-                           properties=item_dict.get('properties', None),
-                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
-
-    def get_definition(self, type=None, format=None):
-        """Method to get the definition of the eventstream"""
-        return super().get_definition(type="eventstreams", format=format)
-
-    def update_definition(self, definition):
-        """Method to update the definition of the eventstream"""
-        return self.core_client.update_item_definition(self.workspace_id, self.id, definition, type="eventstreams")
 
 class GraphQLApi(Item):
     """Class to represent a graphql api in Microsoft Fabric"""
-
-    def __init__(self, id, display_name, type, workspace_id, core_client, properties
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties=None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
     def from_dict(item_dict, core_client):
@@ -251,6 +274,14 @@ class Notebook(Item):
     def update_definition(self, definition):
         """Method to update the definition of the notebook"""
         return self.core_client.update_item_definition(self.workspace_id, self.id, definition, type="notebooks")
+
+    def list_livy_sessions(self):
+        """List all livy sessions in the notebook"""
+        return self.core_client.list_notebook_livy_sessions(self.workspace_id, self.id)
+
+    def get_livy_session(self, livy_id):
+        """Get a livy session in the notebook"""
+        return self.core_client.get_notebook_livy_session(self.workspace_id, self.id, livy_id)
 
 class Reflex(Item):
     """Class to represent a reflex in Microsoft Fabric"""
@@ -320,25 +351,13 @@ class SQLDatabase(Item):
                           properties=item_dict.get('properties', None),
                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
 
-class
-    """Class to represent a
-
-    def __init__(self, id, display_name, type, workspace_id, core_client, properties
+class VariableLibrary(Item):
+    """Class to represent a variable library in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, core_client, properties=None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, core_client, properties, definition, description)
 
     def from_dict(item_dict, core_client):
-        return
+        return VariableLibrary(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                                properties=item_dict.get('properties', None),
-                               definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
-
-    def get_definition(self, type=None, format=None, **kwargs):
-        return super().get_definition(type="dataPipelines", format=format, **kwargs)
-
-    def update_definition(self, definition):
-        """Method to update the definition of the dataPipeline"""
-        return self.core_client.update_item_definition(self.workspace_id, self.id, definition, type="dataPipelines")
-
-    def run_on_demand_item_job(self, execution_data=None):
-        return self.core_client.run_on_demand_item_job(workspace_id=self.workspace_id, item_id=self.id, job_type="Pipeline", execution_data=execution_data)
-
-
+                               definition=item_dict.get('definition', None), description=item_dict.get('description', ""), core_client=core_client)
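The relocated DataPipeline.run_on_demand_item_job() is a thin wrapper around the core client's generic job call, so an on-demand pipeline run can also be triggered directly from the client. A sketch with placeholder IDs; the shape of the returned job object is not shown in this diff.

```python
from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()

# Placeholder IDs for illustration only.
ws_id, pipeline_id = "<workspace-id>", "<datapipeline-id>"

# Equivalent to DataPipeline.run_on_demand_item_job() on the new wrapper class.
job = fcc.run_on_demand_item_job(workspace_id=ws_id, item_id=pipeline_id, job_type="Pipeline")
print(job)
```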
msfabricpysdkcore/tests/test_admin_tags.py
ADDED
@@ -0,0 +1,46 @@
+import unittest
+from dotenv import load_dotenv
+from msfabricpysdkcore import FabricClientAdmin
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fca = FabricClientAdmin()
+
+    def test_admin_api(self):
+        fca = self.fca
+
+        sdk_tag = [tag for tag in fca.list_tags() if tag["displayName"] == "sdk_tag_temp"]
+        if len(sdk_tag) > 0:
+            sdk_tag = sdk_tag[0]
+            resp = fca.delete_tag(tag_id=sdk_tag["id"])
+            self.assertEqual(resp, 200)
+
+        new_tags = [{"displayName": "sdk_tag_temp"}]
+        resp = fca.bulk_create_tags(create_tags_request=new_tags)
+        self.assertEqual(len(resp["tags"]), 1)
+        resp = resp["tags"][0]
+        self.assertEqual(resp["displayName"], "sdk_tag_temp")
+
+        sdk_tag = [tag for tag in fca.list_tags() if tag["displayName"] == "sdk_tag_temp"]
+        self.assertEqual(len(sdk_tag), 1)
+        sdk_tag = sdk_tag[0]
+
+        self.assertIsNotNone(sdk_tag["id"])
+
+        resp = fca.update_tag(tag_id=sdk_tag["id"], display_name="sdk_tag_updated")
+        self.assertIsNotNone(resp["id"])
+        self.assertEqual(resp["displayName"], "sdk_tag_updated")
+
+
+        resp = fca.delete_tag(tag_id=resp["id"])
+        self.assertEqual(resp, 200)
+
+        sdk_tag = [tag for tag in fca.list_tags() if tag["displayName"] == "sdk_tag_temp"]
+        self.assertEqual(len(sdk_tag), 0)
+
+
+
msfabricpysdkcore/tests/test_copy_jobs.py
ADDED
@@ -0,0 +1,60 @@
+import unittest
+from dotenv import load_dotenv
+from msfabricpysdkcore import FabricClientCore
+from datetime import datetime
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        self.fcc = FabricClientCore()
+
+    def test_copy_jobs(self):
+        fcc = self.fcc
+
+        workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
+        item_id = "a9e59ec1-524b-49b1-a185-37e47dc0ceb9"
+
+        copy_jobs = fcc.list_copy_jobs(workspace_id=workspace_id)
+        for copy_job in copy_jobs:
+            if copy_job.id != item_id:
+                resp = fcc.delete_copy_job(workspace_id=workspace_id, copy_job_id=copy_job.id)
+                self.assertEqual(resp, 200)
+
+        copy_job_definition = fcc.get_copy_job_definition(workspace_id=workspace_id, copy_job_id=item_id)
+        self.assertIn("definition", copy_job_definition)
+        definition = copy_job_definition["definition"]
+
+        date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
+        date_str = f"copyjob{date_str}"
+
+        copy_job_new = fcc.create_copy_job(workspace_id=workspace_id, display_name=date_str, definition=definition)
+
+        self.assertEqual(copy_job_new.display_name, date_str)
+
+        copy_job_get = fcc.get_copy_job(workspace_id=workspace_id, copy_job_id=copy_job_new.id)
+        self.assertEqual(copy_job_get.display_name, date_str)
+
+        copy_jobs = fcc.list_copy_jobs(workspace_id=workspace_id)
+        self.assertEqual(len(copy_jobs), 2)
+
+        date_str_updated = date_str + "_updated"
+        copy_job_updated = fcc.update_copy_job(workspace_id=workspace_id, copy_job_id=copy_job_new.id, display_name=date_str_updated, return_item=True)
+        self.assertEqual(copy_job_updated.display_name, date_str_updated)
+
+        copy_job_updated = fcc.update_copy_job_definition(workspace_id=workspace_id, copy_job_id=copy_job_new.id, definition=definition)
+        self.assertEqual(copy_job_updated.status_code, 200)
+
+        for copy_job in copy_jobs:
+            if copy_job.id != item_id:
+                resp = fcc.delete_copy_job(workspace_id=workspace_id, copy_job_id=copy_job.id)
+                self.assertEqual(resp, 200)
+
+
+
+
+
+
+