msgraph-sdk 1.49.0__py3-none-any.whl → 1.50.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- msgraph/_version.py +1 -1
- msgraph/generated/chats/item/chat_item_request_builder.py +10 -0
- msgraph/generated/chats/item/remove_all_access_for_user/remove_all_access_for_user_post_request_body.py +57 -0
- msgraph/generated/chats/item/remove_all_access_for_user/remove_all_access_for_user_request_builder.py +87 -0
- msgraph/generated/drives/item/items/item/retention_label/retention_label_request_builder.py +4 -3
- msgraph/generated/kiota-dom-export.txt +1830 -6
- msgraph/generated/kiota-lock.json +1 -1
- msgraph/generated/models/assigned_place_mode.py +61 -0
- msgraph/generated/models/base_map_feature.py +104 -0
- msgraph/generated/models/building.py +67 -0
- msgraph/generated/models/building_collection_response.py +58 -0
- msgraph/generated/models/building_map.py +71 -0
- msgraph/generated/models/channel.py +1 -1
- msgraph/generated/models/communications_guest_identity.py +4 -0
- msgraph/generated/models/copilot_report_root.py +53 -0
- msgraph/generated/models/desk.py +71 -0
- msgraph/generated/models/desk_collection_response.py +58 -0
- msgraph/generated/models/drop_in_place_mode.py +53 -0
- msgraph/generated/models/education_assignment.py +1 -1
- msgraph/generated/models/education_assignment_defaults.py +1 -1
- msgraph/generated/models/entity.py +177 -0
- msgraph/generated/models/external_connectors/property_.py +1 -1
- msgraph/generated/models/file_storage_container.py +7 -0
- msgraph/generated/models/fixture_map.py +57 -0
- msgraph/generated/models/fixture_map_collection_response.py +58 -0
- msgraph/generated/models/floor.py +57 -0
- msgraph/generated/models/floor_collection_response.py +58 -0
- msgraph/generated/models/footprint_map.py +53 -0
- msgraph/generated/models/footprint_map_collection_response.py +58 -0
- msgraph/generated/models/identity_governance/user_inactivity_trigger.py +57 -0
- msgraph/generated/models/identity_governance/workflow_execution_trigger.py +7 -0
- msgraph/generated/models/level_map.py +78 -0
- msgraph/generated/models/level_map_collection_response.py +58 -0
- msgraph/generated/models/mailbox_details.py +58 -0
- msgraph/generated/models/place.py +51 -0
- msgraph/generated/models/place_mode.py +80 -0
- msgraph/generated/models/planner_plan_container.py +1 -1
- msgraph/generated/models/reservable_place_mode.py +53 -0
- msgraph/generated/models/resource_link.py +69 -0
- msgraph/generated/models/resource_link_type.py +6 -0
- msgraph/generated/models/restore_session_base.py +1 -1
- msgraph/generated/models/risk_detection.py +1 -1
- msgraph/generated/models/risk_service_principal_activity.py +1 -1
- msgraph/generated/models/risk_user_activity.py +1 -1
- msgraph/generated/models/risky_service_principal.py +1 -1
- msgraph/generated/models/risky_user.py +1 -1
- msgraph/generated/models/room.py +0 -12
- msgraph/generated/models/room_list.py +7 -0
- msgraph/generated/models/search_request.py +1 -1
- msgraph/generated/models/section.py +53 -0
- msgraph/generated/models/section_collection_response.py +58 -0
- msgraph/generated/models/section_map.py +57 -0
- msgraph/generated/models/section_map_collection_response.py +58 -0
- msgraph/generated/models/security/account.py +72 -0
- msgraph/generated/models/security/action.py +11 -0
- msgraph/generated/models/security/ai_agent_evidence.py +72 -0
- msgraph/generated/models/security/ai_agent_platform.py +9 -0
- msgraph/generated/models/security/alert_evidence.py +7 -0
- msgraph/generated/models/security/ediscovery_export_operation.py +1 -1
- msgraph/generated/models/security/identity_accounts.py +92 -0
- msgraph/generated/models/security/identity_accounts_collection_response.py +58 -0
- msgraph/generated/models/security/identity_container.py +7 -0
- msgraph/generated/models/security/identity_provider.py +8 -0
- msgraph/generated/models/security/invoke_action_result.py +76 -0
- msgraph/generated/models/security/user.py +61 -0
- msgraph/generated/models/service_principal_risk_detection.py +2 -2
- msgraph/generated/models/share_point_migration_container_info.py +62 -0
- msgraph/generated/models/share_point_migration_event.py +127 -0
- msgraph/generated/models/share_point_migration_event_collection_response.py +58 -0
- msgraph/generated/models/share_point_migration_finish_manifest_file_upload_event.py +57 -0
- msgraph/generated/models/share_point_migration_job.py +67 -0
- msgraph/generated/models/share_point_migration_job_cancelled_event.py +61 -0
- msgraph/generated/models/share_point_migration_job_collection_response.py +58 -0
- msgraph/generated/models/share_point_migration_job_deleted_event.py +53 -0
- msgraph/generated/models/share_point_migration_job_error_event.py +86 -0
- msgraph/generated/models/share_point_migration_job_error_level.py +9 -0
- msgraph/generated/models/share_point_migration_job_postponed_event.py +70 -0
- msgraph/generated/models/share_point_migration_job_progress_event.py +121 -0
- msgraph/generated/models/share_point_migration_job_queued_event.py +53 -0
- msgraph/generated/models/share_point_migration_job_start_event.py +61 -0
- msgraph/generated/models/share_point_migration_object_type.py +14 -0
- msgraph/generated/models/sign_in.py +1 -1
- msgraph/generated/models/subject_rights_request_history.py +1 -1
- msgraph/generated/models/subject_rights_request_stage_detail.py +1 -1
- msgraph/generated/models/unit_map.py +57 -0
- msgraph/generated/models/unit_map_collection_response.py +58 -0
- msgraph/generated/models/workforce_integration.py +1 -1
- msgraph/generated/models/workspace.py +76 -0
- msgraph/generated/models/workspace_collection_response.py +58 -0
- msgraph/generated/places/graph_building/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_building/graph_building_request_builder.py +147 -0
- msgraph/generated/places/graph_desk/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_desk/graph_desk_request_builder.py +147 -0
- msgraph/generated/places/graph_floor/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_floor/graph_floor_request_builder.py +147 -0
- msgraph/generated/places/graph_section/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_section/graph_section_request_builder.py +147 -0
- msgraph/generated/places/graph_workspace/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_workspace/graph_workspace_request_builder.py +147 -0
- msgraph/generated/places/item/descendants/descendants_get_response.py +58 -0
- msgraph/generated/places/item/descendants/descendants_request_builder.py +137 -0
- msgraph/generated/places/item/graph_building/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/graph_building_request_builder.py +127 -0
- msgraph/generated/places/item/graph_building/map/footprints/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/footprints/footprints_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/footprints/item/footprint_map_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/map/levels/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/levels/item/fixtures/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/levels/item/fixtures/fixtures_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/levels/item/fixtures/item/fixture_map_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/map/levels/item/level_map_item_request_builder.py +218 -0
- msgraph/generated/places/item/graph_building/map/levels/item/sections/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/levels/item/sections/item/section_map_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/map/levels/item/sections/sections_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/levels/item/units/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/levels/item/units/item/unit_map_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/map/levels/item/units/units_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/levels/levels_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/map_request_builder.py +208 -0
- msgraph/generated/places/item/graph_desk/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_desk/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_desk/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_desk/graph_desk_request_builder.py +117 -0
- msgraph/generated/places/item/graph_floor/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_floor/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_floor/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_floor/graph_floor_request_builder.py +117 -0
- msgraph/generated/places/item/graph_room_list/graph_room_list_request_builder.py +10 -0
- msgraph/generated/places/item/graph_room_list/workspaces/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_room_list/workspaces/item/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_room_list/workspaces/item/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_room_list/workspaces/item/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_room_list/workspaces/item/workspace_item_request_builder.py +198 -0
- msgraph/generated/places/item/graph_room_list/workspaces/workspaces_request_builder.py +208 -0
- msgraph/generated/places/item/graph_section/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_section/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_section/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_section/graph_section_request_builder.py +117 -0
- msgraph/generated/places/item/graph_workspace/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_workspace/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_workspace/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_workspace/graph_workspace_request_builder.py +117 -0
- msgraph/generated/places/item/place_item_request_builder.py +60 -0
- msgraph/generated/places/places_request_builder.py +50 -0
- msgraph/generated/security/identities/identities_request_builder.py +10 -0
- msgraph/generated/security/identities/identity_accounts/count/count_request_builder.py +104 -0
- msgraph/generated/security/identities/identity_accounts/identity_accounts_request_builder.py +209 -0
- msgraph/generated/security/identities/identity_accounts/item/identity_accounts_item_request_builder.py +199 -0
- msgraph/generated/security/identities/identity_accounts/item/microsoft_graph_security_invoke_action/invoke_action_post_request_body.py +68 -0
- msgraph/generated/security/identities/identity_accounts/item/microsoft_graph_security_invoke_action/microsoft_graph_security_invoke_action_request_builder.py +90 -0
- msgraph/generated/solutions/virtual_events/webinars/item/registrations/item/sessions/sessions_request_builder.py +3 -3
- msgraph/generated/storage/file_storage/containers/item/file_storage_container_item_request_builder.py +20 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/count/count_request_builder.py +104 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/item/progress_events/count/count_request_builder.py +104 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/item/progress_events/item/share_point_migration_event_item_request_builder.py +188 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/item/progress_events/progress_events_request_builder.py +208 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/item/share_point_migration_job_item_request_builder.py +198 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/migration_jobs_request_builder.py +208 -0
- msgraph/generated/storage/file_storage/containers/item/provision_migration_containers/provision_migration_containers_request_builder.py +81 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/file_storage_container_item_request_builder.py +20 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/count/count_request_builder.py +104 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/item/progress_events/count/count_request_builder.py +104 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/item/progress_events/item/share_point_migration_event_item_request_builder.py +188 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/item/progress_events/progress_events_request_builder.py +208 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/item/share_point_migration_job_item_request_builder.py +198 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/migration_jobs_request_builder.py +208 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/provision_migration_containers/provision_migration_containers_request_builder.py +81 -0
- msgraph/generated/users/item/chats/item/chat_item_request_builder.py +10 -0
- msgraph/generated/users/item/chats/item/remove_all_access_for_user/remove_all_access_for_user_post_request_body.py +57 -0
- msgraph/generated/users/item/chats/item/remove_all_access_for_user/remove_all_access_for_user_request_builder.py +87 -0
- {msgraph_sdk-1.49.0.dist-info → msgraph_sdk-1.50.0.dist-info}/METADATA +1 -1
- {msgraph_sdk-1.49.0.dist-info → msgraph_sdk-1.50.0.dist-info}/RECORD +176 -44
- {msgraph_sdk-1.49.0.dist-info → msgraph_sdk-1.50.0.dist-info}/WHEEL +0 -0
- {msgraph_sdk-1.49.0.dist-info → msgraph_sdk-1.50.0.dist-info}/licenses/LICENSE +0 -0
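Most of the new surface in 1.50.0 falls into three areas: a places hierarchy (buildings, floors, sections, desks, workspaces, plus their check-ins and indoor maps), SharePoint migration jobs under fileStorage containers, and a removeAllAccessForUser action on chats. The sketch below is a hypothetical illustration, not taken from the package, of how the new request builders would typically be called through GraphServiceClient following the SDK's usual async kiota conventions; builder and property names are read off the generated paths listed above, and the removeAllAccessForUser request body is omitted because its properties are not shown in this diff.

# Hypothetical usage sketch for msgraph-sdk 1.50.0 (assumptions: an async Azure credential
# is available and the builder/property names below match the generated paths listed above).
import asyncio
from azure.identity.aio import DefaultAzureCredential
from msgraph import GraphServiceClient

async def main() -> None:
    credential = DefaultAzureCredential()
    client = GraphServiceClient(credentials=credential, scopes=["https://graph.microsoft.com/.default"])

    # New in 1.50.0: the places collection can be narrowed to the building subtype.
    buildings = await client.places.graph_building.get()
    for building in buildings.value or []:
        print(building.display_name)

    # Also new: SharePoint migration jobs hang off a fileStorage container.
    jobs = await client.storage.file_storage.containers.by_file_storage_container_id(
        "{containerId}"
    ).migration_jobs.get()
    print(f"{len(jobs.value or [])} migration job(s)")

    await credential.close()

asyncio.run(main())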

msgraph/generated/models/security/user.py
@@ -0,0 +1,61 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .identity_accounts import IdentityAccounts
+
+from .identity_accounts import IdentityAccounts
+
+@dataclass
+class User(IdentityAccounts, Parsable):
+    # The OdataType property
+    odata_type: Optional[str] = "#microsoft.graph.security.user"
+    # Email address of the user.
+    email_address: Optional[str] = None
+    # The user principal name.
+    user_principal_name: Optional[str] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> User:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: User
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return User()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .identity_accounts import IdentityAccounts
+
+        from .identity_accounts import IdentityAccounts
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "emailAddress": lambda n : setattr(self, 'email_address', n.get_str_value()),
+            "userPrincipalName": lambda n : setattr(self, 'user_principal_name', n.get_str_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_str_value("emailAddress", self.email_address)
+        writer.write_str_value("userPrincipalName", self.user_principal_name)
+
+
msgraph/generated/models/service_principal_risk_detection.py
@@ -19,7 +19,7 @@ from .entity import Entity
 
 @dataclass
 class ServicePrincipalRiskDetection(Entity, Parsable):
-    # Indicates the activity type the detected risk is linked to. The possible values are: signin, servicePrincipal. Use the Prefer: include-unknown-enum-members request header to get the following
+    # Indicates the activity type the detected risk is linked to. The possible values are: signin, servicePrincipal. Use the Prefer: include-unknown-enum-members request header to get the following members in this evolvable enum: servicePrincipal.
     activity: Optional[ActivityType] = None
     # Date and time when the risky activity occurred. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z
     activity_date_time: Optional[datetime.datetime] = None
@@ -45,7 +45,7 @@ class ServicePrincipalRiskDetection(Entity, Parsable):
     odata_type: Optional[str] = None
     # Request identifier of the sign-in activity associated with the risk detection. This property is null if the risk detection is not associated with a sign-in activity. Supports $filter (eq).
     request_id: Optional[str] = None
-    # Details of the detected risk. Note: Details for this property are only available for Workload Identities Premium customers. Events in tenants without this license will be returned hidden.
+    # Details of the detected risk. Note: Details for this property are only available for Workload Identities Premium customers. Events in tenants without this license will be returned hidden.
     risk_detail: Optional[RiskDetail] = None
     # The type of risk event detected. The possible values are: investigationsThreatIntelligence, generic, adminConfirmedServicePrincipalCompromised, suspiciousSignins, leakedCredentials, anomalousServicePrincipalActivity, maliciousApplication, suspiciousApplication.
     risk_event_type: Optional[str] = None

msgraph/generated/models/share_point_migration_container_info.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
+from kiota_abstractions.store import BackedModel, BackingStore, BackingStoreFactorySingleton
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+@dataclass
+class SharePointMigrationContainerInfo(AdditionalDataHolder, BackedModel, Parsable):
+    # Stores model information.
+    backing_store: BackingStore = field(default_factory=BackingStoreFactorySingleton(backing_store_factory=None).backing_store_factory.create_backing_store, repr=False)
+
+    # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
+    additional_data: dict[str, Any] = field(default_factory=dict)
+    # A valid URL with a SAS token for accessing the Azure blob storage container that contains the file content. Read-only.
+    data_container_uri: Optional[str] = None
+    # Provides the AES-256-CBC encryption key if files stored in Azure blob containers are encrypted. The key is Base64-encoded. Read-only.
+    encryption_key: Optional[str] = None
+    # A valid URL with a SAS token for accessing the Azure blob storage container that contains the file metadata. Read-only.
+    metadata_container_uri: Optional[str] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationContainerInfo:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationContainerInfo
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationContainerInfo()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        fields: dict[str, Callable[[Any], None]] = {
+            "dataContainerUri": lambda n : setattr(self, 'data_container_uri', n.get_str_value()),
+            "encryptionKey": lambda n : setattr(self, 'encryption_key', n.get_str_value()),
+            "metadataContainerUri": lambda n : setattr(self, 'metadata_container_uri', n.get_str_value()),
+            "@odata.type": lambda n : setattr(self, 'odata_type', n.get_str_value()),
+        }
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        writer.write_str_value("dataContainerUri", self.data_container_uri)
+        writer.write_str_value("encryptionKey", self.encryption_key)
+        writer.write_str_value("metadataContainerUri", self.metadata_container_uri)
+        writer.write_str_value("@odata.type", self.odata_type)
+        writer.write_additional_data_value(self.additional_data)
+
+
msgraph/generated/models/share_point_migration_event.py
@@ -0,0 +1,127 @@
+from __future__ import annotations
+import datetime
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .entity import Entity
+    from .share_point_migration_finish_manifest_file_upload_event import SharePointMigrationFinishManifestFileUploadEvent
+    from .share_point_migration_job_cancelled_event import SharePointMigrationJobCancelledEvent
+    from .share_point_migration_job_deleted_event import SharePointMigrationJobDeletedEvent
+    from .share_point_migration_job_error_event import SharePointMigrationJobErrorEvent
+    from .share_point_migration_job_postponed_event import SharePointMigrationJobPostponedEvent
+    from .share_point_migration_job_progress_event import SharePointMigrationJobProgressEvent
+    from .share_point_migration_job_queued_event import SharePointMigrationJobQueuedEvent
+    from .share_point_migration_job_start_event import SharePointMigrationJobStartEvent
+
+from .entity import Entity
+
+@dataclass
+class SharePointMigrationEvent(Entity, Parsable):
+    # The correlation ID of a migration job. Read-only.
+    correlation_id: Optional[str] = None
+    # The date and time when the job status changes. The timestamp type represents date and time information using ISO 8601 format and is always in UTC. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Read-only.
+    event_date_time: Optional[datetime.datetime] = None
+    # The unique identifier of a migration job. Read-only.
+    job_id: Optional[str] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        try:
+            child_node = parse_node.get_child_node("@odata.type")
+            mapping_value = child_node.get_str_value() if child_node else None
+        except AttributeError:
+            mapping_value = None
+        if mapping_value and mapping_value.casefold() == "#microsoft.graph.sharePointMigrationFinishManifestFileUploadEvent".casefold():
+            from .share_point_migration_finish_manifest_file_upload_event import SharePointMigrationFinishManifestFileUploadEvent
+
+            return SharePointMigrationFinishManifestFileUploadEvent()
+        if mapping_value and mapping_value.casefold() == "#microsoft.graph.sharePointMigrationJobCancelledEvent".casefold():
+            from .share_point_migration_job_cancelled_event import SharePointMigrationJobCancelledEvent
+
+            return SharePointMigrationJobCancelledEvent()
+        if mapping_value and mapping_value.casefold() == "#microsoft.graph.sharePointMigrationJobDeletedEvent".casefold():
+            from .share_point_migration_job_deleted_event import SharePointMigrationJobDeletedEvent
+
+            return SharePointMigrationJobDeletedEvent()
+        if mapping_value and mapping_value.casefold() == "#microsoft.graph.sharePointMigrationJobErrorEvent".casefold():
+            from .share_point_migration_job_error_event import SharePointMigrationJobErrorEvent
+
+            return SharePointMigrationJobErrorEvent()
+        if mapping_value and mapping_value.casefold() == "#microsoft.graph.sharePointMigrationJobPostponedEvent".casefold():
+            from .share_point_migration_job_postponed_event import SharePointMigrationJobPostponedEvent
+
+            return SharePointMigrationJobPostponedEvent()
+        if mapping_value and mapping_value.casefold() == "#microsoft.graph.sharePointMigrationJobProgressEvent".casefold():
+            from .share_point_migration_job_progress_event import SharePointMigrationJobProgressEvent
+
+            return SharePointMigrationJobProgressEvent()
+        if mapping_value and mapping_value.casefold() == "#microsoft.graph.sharePointMigrationJobQueuedEvent".casefold():
+            from .share_point_migration_job_queued_event import SharePointMigrationJobQueuedEvent
+
+            return SharePointMigrationJobQueuedEvent()
+        if mapping_value and mapping_value.casefold() == "#microsoft.graph.sharePointMigrationJobStartEvent".casefold():
+            from .share_point_migration_job_start_event import SharePointMigrationJobStartEvent
+
+            return SharePointMigrationJobStartEvent()
+        return SharePointMigrationEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .entity import Entity
+        from .share_point_migration_finish_manifest_file_upload_event import SharePointMigrationFinishManifestFileUploadEvent
+        from .share_point_migration_job_cancelled_event import SharePointMigrationJobCancelledEvent
+        from .share_point_migration_job_deleted_event import SharePointMigrationJobDeletedEvent
+        from .share_point_migration_job_error_event import SharePointMigrationJobErrorEvent
+        from .share_point_migration_job_postponed_event import SharePointMigrationJobPostponedEvent
+        from .share_point_migration_job_progress_event import SharePointMigrationJobProgressEvent
+        from .share_point_migration_job_queued_event import SharePointMigrationJobQueuedEvent
+        from .share_point_migration_job_start_event import SharePointMigrationJobStartEvent
+
+        from .entity import Entity
+        from .share_point_migration_finish_manifest_file_upload_event import SharePointMigrationFinishManifestFileUploadEvent
+        from .share_point_migration_job_cancelled_event import SharePointMigrationJobCancelledEvent
+        from .share_point_migration_job_deleted_event import SharePointMigrationJobDeletedEvent
+        from .share_point_migration_job_error_event import SharePointMigrationJobErrorEvent
+        from .share_point_migration_job_postponed_event import SharePointMigrationJobPostponedEvent
+        from .share_point_migration_job_progress_event import SharePointMigrationJobProgressEvent
+        from .share_point_migration_job_queued_event import SharePointMigrationJobQueuedEvent
+        from .share_point_migration_job_start_event import SharePointMigrationJobStartEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "correlationId": lambda n : setattr(self, 'correlation_id', n.get_str_value()),
+            "eventDateTime": lambda n : setattr(self, 'event_date_time', n.get_datetime_value()),
+            "jobId": lambda n : setattr(self, 'job_id', n.get_str_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_str_value("correlationId", self.correlation_id)
+        writer.write_datetime_value("eventDateTime", self.event_date_time)
+        writer.write_str_value("jobId", self.job_id)
+
+
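SharePointMigrationEvent above is the base type for the concrete event classes added in this release, and its factory dispatches purely on the @odata.type discriminator string. Below is a minimal sketch of that dispatch using a hypothetical _StubParseNode in place of a real kiota ParseNode; only the two methods the factory actually calls are implemented, and it assumes msgraph-sdk 1.50.0 is importable.

# Minimal sketch of discriminator-based dispatch (stub node is hypothetical, not part of the SDK).
from msgraph.generated.models.share_point_migration_event import SharePointMigrationEvent
from msgraph.generated.models.share_point_migration_job_error_event import SharePointMigrationJobErrorEvent

class _StubParseNode:
    def __init__(self, odata_type: str) -> None:
        self._odata_type = odata_type

    def get_child_node(self, identifier: str):
        # The factory only ever asks for the "@odata.type" child.
        return self if identifier == "@odata.type" else None

    def get_str_value(self) -> str:
        return self._odata_type

node = _StubParseNode("#microsoft.graph.sharePointMigrationJobErrorEvent")
event = SharePointMigrationEvent.create_from_discriminator_value(node)
assert isinstance(event, SharePointMigrationJobErrorEvent)  # dispatched to the concrete subtype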
msgraph/generated/models/share_point_migration_event_collection_response.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+
+@dataclass
+class SharePointMigrationEventCollectionResponse(BaseCollectionPaginationCountResponse, Parsable):
+    # The value property
+    value: Optional[list[SharePointMigrationEvent]] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationEventCollectionResponse:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationEventCollectionResponse
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationEventCollectionResponse()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "value": lambda n : setattr(self, 'value', n.get_collection_of_object_values(SharePointMigrationEvent)),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_collection_of_object_values("value", self.value)
+
+
msgraph/generated/models/share_point_migration_finish_manifest_file_upload_event.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .share_point_migration_event import SharePointMigrationEvent
+
+@dataclass
+class SharePointMigrationFinishManifestFileUploadEvent(SharePointMigrationEvent, Parsable):
+    # The exported manifest file name. Read-only.
+    manifest_file_name: Optional[str] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationFinishManifestFileUploadEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationFinishManifestFileUploadEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationFinishManifestFileUploadEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "manifestFileName": lambda n : setattr(self, 'manifest_file_name', n.get_str_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_str_value("manifestFileName", self.manifest_file_name)
+
+
msgraph/generated/models/share_point_migration_job.py
@@ -0,0 +1,67 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .entity import Entity
+    from .share_point_migration_container_info import SharePointMigrationContainerInfo
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .entity import Entity
+
+@dataclass
+class SharePointMigrationJob(Entity, Parsable):
+    # The containerInfo property
+    container_info: Optional[SharePointMigrationContainerInfo] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+    # A collection of migration events that reflects the job status changes.
+    progress_events: Optional[list[SharePointMigrationEvent]] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJob:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJob
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJob()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .entity import Entity
+        from .share_point_migration_container_info import SharePointMigrationContainerInfo
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .entity import Entity
+        from .share_point_migration_container_info import SharePointMigrationContainerInfo
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "containerInfo": lambda n : setattr(self, 'container_info', n.get_object_value(SharePointMigrationContainerInfo)),
+            "progressEvents": lambda n : setattr(self, 'progress_events', n.get_collection_of_object_values(SharePointMigrationEvent)),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_object_value("containerInfo", self.container_info)
+        writer.write_collection_of_object_values("progressEvents", self.progress_events)
+
+
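SharePointMigrationJob above carries its containerInfo plus the progressEvents collection, and each element of that collection is materialized as one of the concrete event subclasses through the factory shown earlier. A small hypothetical helper follows, assuming job is a SharePointMigrationJob that has already been fetched (for example through the new migration_jobs request builders listed in the file table above).

# Hypothetical helper (not part of the SDK): summarize a fetched job's progress events.
from msgraph.generated.models.share_point_migration_job import SharePointMigrationJob
from msgraph.generated.models.share_point_migration_job_error_event import SharePointMigrationJobErrorEvent

def summarize_progress(job: SharePointMigrationJob) -> None:
    for event in job.progress_events or []:
        stamp = event.event_date_time.isoformat() if event.event_date_time else "unknown time"
        kind = type(event).__name__
        if isinstance(event, SharePointMigrationJobErrorEvent):
            print(f"{stamp}  {kind} (job {event.job_id})")
        else:
            print(f"{stamp}  {kind}")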
msgraph/generated/models/share_point_migration_job_cancelled_event.py
@@ -0,0 +1,61 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .share_point_migration_event import SharePointMigrationEvent
+
+@dataclass
+class SharePointMigrationJobCancelledEvent(SharePointMigrationEvent, Parsable):
+    # True when a user cancels the job; otherwise, false. Read-only.
+    is_cancelled_by_user: Optional[bool] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+    # The current retry count of the job. Read-only.
+    total_retry_count: Optional[int] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJobCancelledEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJobCancelledEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJobCancelledEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "isCancelledByUser": lambda n : setattr(self, 'is_cancelled_by_user', n.get_bool_value()),
+            "totalRetryCount": lambda n : setattr(self, 'total_retry_count', n.get_int_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_bool_value("isCancelledByUser", self.is_cancelled_by_user)
+        writer.write_int_value("totalRetryCount", self.total_retry_count)
+
+
msgraph/generated/models/share_point_migration_job_collection_response.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+    from .share_point_migration_job import SharePointMigrationJob
+
+from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+
+@dataclass
+class SharePointMigrationJobCollectionResponse(BaseCollectionPaginationCountResponse, Parsable):
+    # The value property
+    value: Optional[list[SharePointMigrationJob]] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJobCollectionResponse:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJobCollectionResponse
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJobCollectionResponse()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+        from .share_point_migration_job import SharePointMigrationJob
+
+        from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+        from .share_point_migration_job import SharePointMigrationJob
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "value": lambda n : setattr(self, 'value', n.get_collection_of_object_values(SharePointMigrationJob)),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_collection_of_object_values("value", self.value)
+
+
msgraph/generated/models/share_point_migration_job_deleted_event.py
@@ -0,0 +1,53 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .share_point_migration_event import SharePointMigrationEvent
+
+@dataclass
+class SharePointMigrationJobDeletedEvent(SharePointMigrationEvent, Parsable):
+    # The OdataType property
+    odata_type: Optional[str] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJobDeletedEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJobDeletedEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJobDeletedEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+
+