msgraph-sdk 1.49.0__py3-none-any.whl → 1.50.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- msgraph/_version.py +1 -1
- msgraph/generated/chats/item/chat_item_request_builder.py +10 -0
- msgraph/generated/chats/item/remove_all_access_for_user/remove_all_access_for_user_post_request_body.py +57 -0
- msgraph/generated/chats/item/remove_all_access_for_user/remove_all_access_for_user_request_builder.py +87 -0
- msgraph/generated/drives/item/items/item/retention_label/retention_label_request_builder.py +4 -3
- msgraph/generated/kiota-dom-export.txt +1830 -6
- msgraph/generated/kiota-lock.json +1 -1
- msgraph/generated/models/assigned_place_mode.py +61 -0
- msgraph/generated/models/base_map_feature.py +104 -0
- msgraph/generated/models/building.py +67 -0
- msgraph/generated/models/building_collection_response.py +58 -0
- msgraph/generated/models/building_map.py +71 -0
- msgraph/generated/models/channel.py +1 -1
- msgraph/generated/models/communications_guest_identity.py +4 -0
- msgraph/generated/models/copilot_report_root.py +53 -0
- msgraph/generated/models/desk.py +71 -0
- msgraph/generated/models/desk_collection_response.py +58 -0
- msgraph/generated/models/drop_in_place_mode.py +53 -0
- msgraph/generated/models/education_assignment.py +1 -1
- msgraph/generated/models/education_assignment_defaults.py +1 -1
- msgraph/generated/models/entity.py +177 -0
- msgraph/generated/models/external_connectors/property_.py +1 -1
- msgraph/generated/models/file_storage_container.py +7 -0
- msgraph/generated/models/fixture_map.py +57 -0
- msgraph/generated/models/fixture_map_collection_response.py +58 -0
- msgraph/generated/models/floor.py +57 -0
- msgraph/generated/models/floor_collection_response.py +58 -0
- msgraph/generated/models/footprint_map.py +53 -0
- msgraph/generated/models/footprint_map_collection_response.py +58 -0
- msgraph/generated/models/identity_governance/user_inactivity_trigger.py +57 -0
- msgraph/generated/models/identity_governance/workflow_execution_trigger.py +7 -0
- msgraph/generated/models/level_map.py +78 -0
- msgraph/generated/models/level_map_collection_response.py +58 -0
- msgraph/generated/models/mailbox_details.py +58 -0
- msgraph/generated/models/place.py +51 -0
- msgraph/generated/models/place_mode.py +80 -0
- msgraph/generated/models/planner_plan_container.py +1 -1
- msgraph/generated/models/reservable_place_mode.py +53 -0
- msgraph/generated/models/resource_link.py +69 -0
- msgraph/generated/models/resource_link_type.py +6 -0
- msgraph/generated/models/restore_session_base.py +1 -1
- msgraph/generated/models/risk_detection.py +1 -1
- msgraph/generated/models/risk_service_principal_activity.py +1 -1
- msgraph/generated/models/risk_user_activity.py +1 -1
- msgraph/generated/models/risky_service_principal.py +1 -1
- msgraph/generated/models/risky_user.py +1 -1
- msgraph/generated/models/room.py +0 -12
- msgraph/generated/models/room_list.py +7 -0
- msgraph/generated/models/search_request.py +1 -1
- msgraph/generated/models/section.py +53 -0
- msgraph/generated/models/section_collection_response.py +58 -0
- msgraph/generated/models/section_map.py +57 -0
- msgraph/generated/models/section_map_collection_response.py +58 -0
- msgraph/generated/models/security/account.py +72 -0
- msgraph/generated/models/security/action.py +11 -0
- msgraph/generated/models/security/ai_agent_evidence.py +72 -0
- msgraph/generated/models/security/ai_agent_platform.py +9 -0
- msgraph/generated/models/security/alert_evidence.py +7 -0
- msgraph/generated/models/security/ediscovery_export_operation.py +1 -1
- msgraph/generated/models/security/identity_accounts.py +92 -0
- msgraph/generated/models/security/identity_accounts_collection_response.py +58 -0
- msgraph/generated/models/security/identity_container.py +7 -0
- msgraph/generated/models/security/identity_provider.py +8 -0
- msgraph/generated/models/security/invoke_action_result.py +76 -0
- msgraph/generated/models/security/user.py +61 -0
- msgraph/generated/models/service_principal_risk_detection.py +2 -2
- msgraph/generated/models/share_point_migration_container_info.py +62 -0
- msgraph/generated/models/share_point_migration_event.py +127 -0
- msgraph/generated/models/share_point_migration_event_collection_response.py +58 -0
- msgraph/generated/models/share_point_migration_finish_manifest_file_upload_event.py +57 -0
- msgraph/generated/models/share_point_migration_job.py +67 -0
- msgraph/generated/models/share_point_migration_job_cancelled_event.py +61 -0
- msgraph/generated/models/share_point_migration_job_collection_response.py +58 -0
- msgraph/generated/models/share_point_migration_job_deleted_event.py +53 -0
- msgraph/generated/models/share_point_migration_job_error_event.py +86 -0
- msgraph/generated/models/share_point_migration_job_error_level.py +9 -0
- msgraph/generated/models/share_point_migration_job_postponed_event.py +70 -0
- msgraph/generated/models/share_point_migration_job_progress_event.py +121 -0
- msgraph/generated/models/share_point_migration_job_queued_event.py +53 -0
- msgraph/generated/models/share_point_migration_job_start_event.py +61 -0
- msgraph/generated/models/share_point_migration_object_type.py +14 -0
- msgraph/generated/models/sign_in.py +1 -1
- msgraph/generated/models/subject_rights_request_history.py +1 -1
- msgraph/generated/models/subject_rights_request_stage_detail.py +1 -1
- msgraph/generated/models/unit_map.py +57 -0
- msgraph/generated/models/unit_map_collection_response.py +58 -0
- msgraph/generated/models/workforce_integration.py +1 -1
- msgraph/generated/models/workspace.py +76 -0
- msgraph/generated/models/workspace_collection_response.py +58 -0
- msgraph/generated/places/graph_building/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_building/graph_building_request_builder.py +147 -0
- msgraph/generated/places/graph_desk/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_desk/graph_desk_request_builder.py +147 -0
- msgraph/generated/places/graph_floor/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_floor/graph_floor_request_builder.py +147 -0
- msgraph/generated/places/graph_section/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_section/graph_section_request_builder.py +147 -0
- msgraph/generated/places/graph_workspace/count/count_request_builder.py +104 -0
- msgraph/generated/places/graph_workspace/graph_workspace_request_builder.py +147 -0
- msgraph/generated/places/item/descendants/descendants_get_response.py +58 -0
- msgraph/generated/places/item/descendants/descendants_request_builder.py +137 -0
- msgraph/generated/places/item/graph_building/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/graph_building_request_builder.py +127 -0
- msgraph/generated/places/item/graph_building/map/footprints/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/footprints/footprints_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/footprints/item/footprint_map_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/map/levels/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/levels/item/fixtures/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/levels/item/fixtures/fixtures_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/levels/item/fixtures/item/fixture_map_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/map/levels/item/level_map_item_request_builder.py +218 -0
- msgraph/generated/places/item/graph_building/map/levels/item/sections/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/levels/item/sections/item/section_map_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/map/levels/item/sections/sections_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/levels/item/units/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_building/map/levels/item/units/item/unit_map_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_building/map/levels/item/units/units_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/levels/levels_request_builder.py +208 -0
- msgraph/generated/places/item/graph_building/map/map_request_builder.py +208 -0
- msgraph/generated/places/item/graph_desk/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_desk/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_desk/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_desk/graph_desk_request_builder.py +117 -0
- msgraph/generated/places/item/graph_floor/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_floor/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_floor/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_floor/graph_floor_request_builder.py +117 -0
- msgraph/generated/places/item/graph_room_list/graph_room_list_request_builder.py +10 -0
- msgraph/generated/places/item/graph_room_list/workspaces/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_room_list/workspaces/item/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_room_list/workspaces/item/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_room_list/workspaces/item/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_room_list/workspaces/item/workspace_item_request_builder.py +198 -0
- msgraph/generated/places/item/graph_room_list/workspaces/workspaces_request_builder.py +208 -0
- msgraph/generated/places/item/graph_section/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_section/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_section/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_section/graph_section_request_builder.py +117 -0
- msgraph/generated/places/item/graph_workspace/check_ins/check_ins_request_builder.py +208 -0
- msgraph/generated/places/item/graph_workspace/check_ins/count/count_request_builder.py +104 -0
- msgraph/generated/places/item/graph_workspace/check_ins/item/check_in_claim_calendar_event_item_request_builder.py +188 -0
- msgraph/generated/places/item/graph_workspace/graph_workspace_request_builder.py +117 -0
- msgraph/generated/places/item/place_item_request_builder.py +60 -0
- msgraph/generated/places/places_request_builder.py +50 -0
- msgraph/generated/security/identities/identities_request_builder.py +10 -0
- msgraph/generated/security/identities/identity_accounts/count/count_request_builder.py +104 -0
- msgraph/generated/security/identities/identity_accounts/identity_accounts_request_builder.py +209 -0
- msgraph/generated/security/identities/identity_accounts/item/identity_accounts_item_request_builder.py +199 -0
- msgraph/generated/security/identities/identity_accounts/item/microsoft_graph_security_invoke_action/invoke_action_post_request_body.py +68 -0
- msgraph/generated/security/identities/identity_accounts/item/microsoft_graph_security_invoke_action/microsoft_graph_security_invoke_action_request_builder.py +90 -0
- msgraph/generated/solutions/virtual_events/webinars/item/registrations/item/sessions/sessions_request_builder.py +3 -3
- msgraph/generated/storage/file_storage/containers/item/file_storage_container_item_request_builder.py +20 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/count/count_request_builder.py +104 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/item/progress_events/count/count_request_builder.py +104 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/item/progress_events/item/share_point_migration_event_item_request_builder.py +188 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/item/progress_events/progress_events_request_builder.py +208 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/item/share_point_migration_job_item_request_builder.py +198 -0
- msgraph/generated/storage/file_storage/containers/item/migration_jobs/migration_jobs_request_builder.py +208 -0
- msgraph/generated/storage/file_storage/containers/item/provision_migration_containers/provision_migration_containers_request_builder.py +81 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/file_storage_container_item_request_builder.py +20 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/count/count_request_builder.py +104 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/item/progress_events/count/count_request_builder.py +104 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/item/progress_events/item/share_point_migration_event_item_request_builder.py +188 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/item/progress_events/progress_events_request_builder.py +208 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/item/share_point_migration_job_item_request_builder.py +198 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/migration_jobs/migration_jobs_request_builder.py +208 -0
- msgraph/generated/storage/file_storage/deleted_containers/item/provision_migration_containers/provision_migration_containers_request_builder.py +81 -0
- msgraph/generated/users/item/chats/item/chat_item_request_builder.py +10 -0
- msgraph/generated/users/item/chats/item/remove_all_access_for_user/remove_all_access_for_user_post_request_body.py +57 -0
- msgraph/generated/users/item/chats/item/remove_all_access_for_user/remove_all_access_for_user_request_builder.py +87 -0
- {msgraph_sdk-1.49.0.dist-info → msgraph_sdk-1.50.0.dist-info}/METADATA +1 -1
- {msgraph_sdk-1.49.0.dist-info → msgraph_sdk-1.50.0.dist-info}/RECORD +176 -44
- {msgraph_sdk-1.49.0.dist-info → msgraph_sdk-1.50.0.dist-info}/WHEEL +0 -0
- {msgraph_sdk-1.49.0.dist-info → msgraph_sdk-1.50.0.dist-info}/licenses/LICENSE +0 -0
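
The headline additions in 1.50.0 are the chat removeAllAccessForUser action, the places/building-map hierarchy (building, floor, section, desk, workspace, and the *Map models), the SharePoint migration job surface under fileStorage containers, and the security identityAccounts entities. As a rough orientation only, here is a minimal sketch of how the new chat action might be called through the generated builders; it assumes the usual kiota builder pattern and an already-configured app credential, and it leaves the request body empty because its fields are not shown in this diff:

    import asyncio
    from azure.identity.aio import ClientSecretCredential
    from msgraph import GraphServiceClient
    from msgraph.generated.chats.item.remove_all_access_for_user.remove_all_access_for_user_post_request_body import (
        RemoveAllAccessForUserPostRequestBody,
    )

    async def remove_access(chat_id: str) -> None:
        # Hypothetical wiring; tenant/client/secret values depend on your app registration.
        credential = ClientSecretCredential("tenant-id", "client-id", "client-secret")
        client = GraphServiceClient(credentials=credential, scopes=["https://graph.microsoft.com/.default"])
        body = RemoveAllAccessForUserPostRequestBody()
        # The user to strip access from is set on the body; its exact fields live in
        # remove_all_access_for_user_post_request_body.py and are not reproduced here.
        await client.chats.by_chat_id(chat_id).remove_all_access_for_user.post(body)

    # asyncio.run(remove_access("19:meeting_example@thread.v2"))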
msgraph/generated/models/share_point_migration_job_error_event.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .public_error import PublicError
+    from .share_point_migration_event import SharePointMigrationEvent
+    from .share_point_migration_job_error_level import SharePointMigrationJobErrorLevel
+    from .share_point_migration_object_type import SharePointMigrationObjectType
+
+from .share_point_migration_event import SharePointMigrationEvent
+
+@dataclass
+class SharePointMigrationJobErrorEvent(SharePointMigrationEvent, Parsable):
+    # The error property
+    error: Optional[PublicError] = None
+    # The errorLevel property
+    error_level: Optional[SharePointMigrationJobErrorLevel] = None
+    # The object ID. Read-only.
+    object_id: Optional[str] = None
+    # The objectType property
+    object_type: Optional[SharePointMigrationObjectType] = None
+    # The object URL. Read-only.
+    object_url: Optional[str] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+    # The current retry count of the job. Read-only.
+    total_retry_count: Optional[int] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJobErrorEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJobErrorEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJobErrorEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .public_error import PublicError
+        from .share_point_migration_event import SharePointMigrationEvent
+        from .share_point_migration_job_error_level import SharePointMigrationJobErrorLevel
+        from .share_point_migration_object_type import SharePointMigrationObjectType
+
+        from .public_error import PublicError
+        from .share_point_migration_event import SharePointMigrationEvent
+        from .share_point_migration_job_error_level import SharePointMigrationJobErrorLevel
+        from .share_point_migration_object_type import SharePointMigrationObjectType
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "error": lambda n : setattr(self, 'error', n.get_object_value(PublicError)),
+            "errorLevel": lambda n : setattr(self, 'error_level', n.get_enum_value(SharePointMigrationJobErrorLevel)),
+            "objectId": lambda n : setattr(self, 'object_id', n.get_str_value()),
+            "objectType": lambda n : setattr(self, 'object_type', n.get_enum_value(SharePointMigrationObjectType)),
+            "objectUrl": lambda n : setattr(self, 'object_url', n.get_str_value()),
+            "totalRetryCount": lambda n : setattr(self, 'total_retry_count', n.get_int_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_object_value("error", self.error)
+        writer.write_enum_value("errorLevel", self.error_level)
+        writer.write_str_value("objectId", self.object_id)
+        writer.write_enum_value("objectType", self.object_type)
+        writer.write_str_value("objectUrl", self.object_url)
+        writer.write_int_value("totalRetryCount", self.total_retry_count)
+
+
msgraph/generated/models/share_point_migration_job_postponed_event.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+import datetime
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .share_point_migration_event import SharePointMigrationEvent
+
+@dataclass
+class SharePointMigrationJobPostponedEvent(SharePointMigrationEvent, Parsable):
+    # The number of migration jobs in the queue of the current database. Read-only.
+    jobs_in_queue: Optional[int] = None
+    # The date and time that indicate when this job is picked up next. The timestamp type represents date and time information using ISO 8601 format and is always in UTC. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Read-only.
+    next_pickup_date_time: Optional[datetime.datetime] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+    # The reason for the postponement. Read-only.
+    reason: Optional[str] = None
+    # The current retry count of the job. Read-only.
+    total_retry_count: Optional[int] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJobPostponedEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJobPostponedEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJobPostponedEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "jobsInQueue": lambda n : setattr(self, 'jobs_in_queue', n.get_int_value()),
+            "nextPickupDateTime": lambda n : setattr(self, 'next_pickup_date_time', n.get_datetime_value()),
+            "reason": lambda n : setattr(self, 'reason', n.get_str_value()),
+            "totalRetryCount": lambda n : setattr(self, 'total_retry_count', n.get_int_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_int_value("jobsInQueue", self.jobs_in_queue)
+        writer.write_datetime_value("nextPickupDateTime", self.next_pickup_date_time)
+        writer.write_str_value("reason", self.reason)
+        writer.write_int_value("totalRetryCount", self.total_retry_count)
+
+
msgraph/generated/models/share_point_migration_job_progress_event.py
@@ -0,0 +1,121 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .share_point_migration_event import SharePointMigrationEvent
+
+@dataclass
+class SharePointMigrationJobProgressEvent(SharePointMigrationEvent, Parsable):
+    # The number of bytes processed. Read-only.
+    bytes_processed: Optional[int] = None
+    # The number of bytes processed with version history excluded. Read-only.
+    bytes_processed_only_current_version: Optional[int] = None
+    # CPU duration in milliseconds. Read-only.
+    cpu_duration_ms: Optional[int] = None
+    # The number of files processed. Read-only.
+    files_processed: Optional[int] = None
+    # The number of files processed with version history excluded. Read-only.
+    files_processed_only_current_version: Optional[int] = None
+    # True if the job status is End. False if the job is In progress. Read-only.
+    is_completed: Optional[bool] = None
+    # The unique identifier of the last object processed. Read-only.
+    last_processed_object_id: Optional[str] = None
+    # The number of objects processed. Read-only.
+    objects_processed: Optional[int] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+    # SQL duration in milliseconds. Read-only.
+    sql_duration_ms: Optional[int] = None
+    # SQL query count. Read-only.
+    sql_query_count: Optional[int] = None
+    # Total duration time in milliseconds. Read-only.
+    total_duration_ms: Optional[int] = None
+    # Total errors. Read-only.
+    total_errors: Optional[int] = None
+    # Total bytes to be processed. Read-only.
+    total_expected_bytes: Optional[int] = None
+    # The number of objects to process. Read-only.
+    total_expected_objects: Optional[int] = None
+    # The current retry count of the job. Read-only.
+    total_retry_count: Optional[int] = None
+    # Total warnings. Read-only.
+    total_warnings: Optional[int] = None
+    # Waiting time due to SQL throttling, in milliseconds. Read-only.
+    wait_time_on_sql_throttling_ms: Optional[int] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJobProgressEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJobProgressEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJobProgressEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "bytesProcessed": lambda n : setattr(self, 'bytes_processed', n.get_int_value()),
+            "bytesProcessedOnlyCurrentVersion": lambda n : setattr(self, 'bytes_processed_only_current_version', n.get_int_value()),
+            "cpuDurationMs": lambda n : setattr(self, 'cpu_duration_ms', n.get_int_value()),
+            "filesProcessed": lambda n : setattr(self, 'files_processed', n.get_int_value()),
+            "filesProcessedOnlyCurrentVersion": lambda n : setattr(self, 'files_processed_only_current_version', n.get_int_value()),
+            "isCompleted": lambda n : setattr(self, 'is_completed', n.get_bool_value()),
+            "lastProcessedObjectId": lambda n : setattr(self, 'last_processed_object_id', n.get_str_value()),
+            "objectsProcessed": lambda n : setattr(self, 'objects_processed', n.get_int_value()),
+            "sqlDurationMs": lambda n : setattr(self, 'sql_duration_ms', n.get_int_value()),
+            "sqlQueryCount": lambda n : setattr(self, 'sql_query_count', n.get_int_value()),
+            "totalDurationMs": lambda n : setattr(self, 'total_duration_ms', n.get_int_value()),
+            "totalErrors": lambda n : setattr(self, 'total_errors', n.get_int_value()),
+            "totalExpectedBytes": lambda n : setattr(self, 'total_expected_bytes', n.get_int_value()),
+            "totalExpectedObjects": lambda n : setattr(self, 'total_expected_objects', n.get_int_value()),
+            "totalRetryCount": lambda n : setattr(self, 'total_retry_count', n.get_int_value()),
+            "totalWarnings": lambda n : setattr(self, 'total_warnings', n.get_int_value()),
+            "waitTimeOnSqlThrottlingMs": lambda n : setattr(self, 'wait_time_on_sql_throttling_ms', n.get_int_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_int_value("bytesProcessed", self.bytes_processed)
+        writer.write_int_value("bytesProcessedOnlyCurrentVersion", self.bytes_processed_only_current_version)
+        writer.write_int_value("cpuDurationMs", self.cpu_duration_ms)
+        writer.write_int_value("filesProcessed", self.files_processed)
+        writer.write_int_value("filesProcessedOnlyCurrentVersion", self.files_processed_only_current_version)
+        writer.write_bool_value("isCompleted", self.is_completed)
+        writer.write_str_value("lastProcessedObjectId", self.last_processed_object_id)
+        writer.write_int_value("objectsProcessed", self.objects_processed)
+        writer.write_int_value("sqlDurationMs", self.sql_duration_ms)
+        writer.write_int_value("sqlQueryCount", self.sql_query_count)
+        writer.write_int_value("totalDurationMs", self.total_duration_ms)
+        writer.write_int_value("totalErrors", self.total_errors)
+        writer.write_int_value("totalExpectedBytes", self.total_expected_bytes)
+        writer.write_int_value("totalExpectedObjects", self.total_expected_objects)
+        writer.write_int_value("totalRetryCount", self.total_retry_count)
+        writer.write_int_value("totalWarnings", self.total_warnings)
+        writer.write_int_value("waitTimeOnSqlThrottlingMs", self.wait_time_on_sql_throttling_ms)
+
+
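
The queued, start, progress, error, and postponed events above are all subtypes of sharePointMigrationEvent, so a caller typically branches on the concrete type after reading a job's progressEvents. A hedged sketch of that read path; the by_*_id indexer names are assumptions inferred from the item-builder paths listed earlier, and client is an already-built GraphServiceClient:

    from msgraph.generated.models.share_point_migration_job_error_event import (
        SharePointMigrationJobErrorEvent,
    )
    from msgraph.generated.models.share_point_migration_job_progress_event import (
        SharePointMigrationJobProgressEvent,
    )

    async def dump_progress(client, container_id: str, job_id: str) -> None:
        # Assumed route: storage/fileStorage/containers/{id}/migrationJobs/{id}/progressEvents
        events = await (
            client.storage.file_storage.containers
            .by_file_storage_container_id(container_id)
            .migration_jobs.by_share_point_migration_job_id(job_id)
            .progress_events.get()
        )
        for event in events.value or []:
            if isinstance(event, SharePointMigrationJobProgressEvent):
                print(f"{event.objects_processed}/{event.total_expected_objects} objects, "
                      f"{event.bytes_processed} bytes, completed={event.is_completed}")
            elif isinstance(event, SharePointMigrationJobErrorEvent):
                print(f"{event.error_level}: {event.object_url} (retry {event.total_retry_count})")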
msgraph/generated/models/share_point_migration_job_queued_event.py
@@ -0,0 +1,53 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .share_point_migration_event import SharePointMigrationEvent
+
+@dataclass
+class SharePointMigrationJobQueuedEvent(SharePointMigrationEvent, Parsable):
+    # The OdataType property
+    odata_type: Optional[str] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJobQueuedEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJobQueuedEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJobQueuedEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+
+
msgraph/generated/models/share_point_migration_job_start_event.py
@@ -0,0 +1,61 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .share_point_migration_event import SharePointMigrationEvent
+
+from .share_point_migration_event import SharePointMigrationEvent
+
+@dataclass
+class SharePointMigrationJobStartEvent(SharePointMigrationEvent, Parsable):
+    # True if the job is restarted. False if it's the initial start. Read-only.
+    is_restarted: Optional[bool] = None
+    # The OdataType property
+    odata_type: Optional[str] = None
+    # The current retry count of the job. Read-only.
+    total_retry_count: Optional[int] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> SharePointMigrationJobStartEvent:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: SharePointMigrationJobStartEvent
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return SharePointMigrationJobStartEvent()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        from .share_point_migration_event import SharePointMigrationEvent
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "isRestarted": lambda n : setattr(self, 'is_restarted', n.get_bool_value()),
+            "totalRetryCount": lambda n : setattr(self, 'total_retry_count', n.get_int_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_bool_value("isRestarted", self.is_restarted)
+        writer.write_int_value("totalRetryCount", self.total_retry_count)
+
+
msgraph/generated/models/share_point_migration_object_type.py
@@ -0,0 +1,14 @@
+from enum import Enum
+
+class SharePointMigrationObjectType(str, Enum):
+    Site = "site",
+    Web = "web",
+    Folder = "folder",
+    List_ = "list",
+    ListItem = "listItem",
+    File = "file",
+    Alert = "alert",
+    SharedWithObject = "sharedWithObject",
+    Invalid = "invalid",
+    UnknownFutureValue = "unknownFutureValue",
+
msgraph/generated/models/sign_in.py
@@ -49,7 +49,7 @@ class SignIn(Entity, Parsable):
     resource_display_name: Optional[str] = None
     # ID of the resource that the user signed into. Supports $filter (eq).
     resource_id: Optional[str] = None
-    # The reason behind a specific state of a risky user, sign-in, or a risk event. The
+    # The reason behind a specific state of a risky user, sign-in, or a risk event. The value none means that Microsoft Entra risk detection did not flag the user or the sign-in as a risky event so far. Supports $filter (eq). Note: Details for this property are only available for Microsoft Entra ID P2 customers. All other customers are returned hidden.
     risk_detail: Optional[RiskDetail] = None
     # The riskEventTypes property
     risk_event_types: Optional[list[RiskEventType]] = None
msgraph/generated/models/subject_rights_request_history.py
@@ -24,7 +24,7 @@ class SubjectRightsRequestHistory(AdditionalDataHolder, BackedModel, Parsable):
     event_date_time: Optional[datetime.datetime] = None
     # The OdataType property
     odata_type: Optional[str] = None
-    # The stage when the entity was changed. Possible values are: contentRetrieval, contentReview, generateReport, contentDeletion, caseResolved, unknownFutureValue, approval. Use the Prefer: include-unknown-enum-members request header to get the following
+    # The stage when the entity was changed. Possible values are: contentRetrieval, contentReview, generateReport, contentDeletion, caseResolved, unknownFutureValue, approval. Use the Prefer: include-unknown-enum-members request header to get the following members in this evolvable enum: approval.
     stage: Optional[SubjectRightsRequestStage] = None
     # The status of the stage when the entity was changed. Possible values are: notStarted, current, completed, failed, unknownFutureValue.
     stage_status: Optional[SubjectRightsRequestStageStatus] = None
msgraph/generated/models/subject_rights_request_stage_detail.py
@@ -21,7 +21,7 @@ class SubjectRightsRequestStageDetail(AdditionalDataHolder, BackedModel, Parsable):
     error: Optional[PublicError] = None
     # The OdataType property
     odata_type: Optional[str] = None
-    # The stage of the subject rights request. Possible values are: contentRetrieval, contentReview, generateReport, contentDeletion, caseResolved, unknownFutureValue, approval. Use the Prefer: include-unknown-enum-members request header to get the following
+    # The stage of the subject rights request. Possible values are: contentRetrieval, contentReview, generateReport, contentDeletion, caseResolved, unknownFutureValue, approval. Use the Prefer: include-unknown-enum-members request header to get the following members in this evolvable enum: approval.
     stage: Optional[SubjectRightsRequestStage] = None
     # Status of the current stage. Possible values are: notStarted, current, completed, failed, unknownFutureValue.
     status: Optional[SubjectRightsRequestStageStatus] = None
msgraph/generated/models/unit_map.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .base_map_feature import BaseMapFeature
+
+from .base_map_feature import BaseMapFeature
+
+@dataclass
+class UnitMap(BaseMapFeature, Parsable):
+    # The OdataType property
+    odata_type: Optional[str] = "#microsoft.graph.unitMap"
+    # The placeId property
+    place_id: Optional[str] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> UnitMap:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: UnitMap
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return UnitMap()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .base_map_feature import BaseMapFeature
+
+        from .base_map_feature import BaseMapFeature
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "placeId": lambda n : setattr(self, 'place_id', n.get_str_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_str_value("placeId", self.place_id)
+
+
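
unitMap sits at the bottom of the new building map hierarchy (building map → levels → sections/units/fixtures), which is exposed through the places builders added in this release. A rough sketch of walking that hierarchy; the graph_building cast and the by_level_map_id indexer name are assumptions based on the builder paths listed earlier, and client is an already-built GraphServiceClient:

    async def list_units(client, building_place_id: str) -> None:
        # Assumed route: places/{id}/microsoft.graph.building/map/levels/{id}/units
        building = client.places.by_place_id(building_place_id).graph_building
        levels = await building.map.levels.get()
        for level in levels.value or []:
            units = await building.map.levels.by_level_map_id(level.id).units.get()
            for unit in units.value or []:
                print(level.id, unit.id, unit.place_id)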
msgraph/generated/models/unit_map_collection_response.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+    from .unit_map import UnitMap
+
+from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+
+@dataclass
+class UnitMapCollectionResponse(BaseCollectionPaginationCountResponse, Parsable):
+    # The value property
+    value: Optional[list[UnitMap]] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> UnitMapCollectionResponse:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: UnitMapCollectionResponse
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return UnitMapCollectionResponse()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+        from .unit_map import UnitMap
+
+        from .base_collection_pagination_count_response import BaseCollectionPaginationCountResponse
+        from .unit_map import UnitMap
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "value": lambda n : setattr(self, 'value', n.get_collection_of_object_values(UnitMap)),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_collection_of_object_values("value", self.value)
+
+
msgraph/generated/models/workforce_integration.py
@@ -20,7 +20,7 @@ class WorkforceIntegration(ChangeTrackedEntity, Parsable):
     api_version: Optional[int] = None
     # Name of the workforce integration.
     display_name: Optional[str] = None
-    # Support to view eligibility-filtered results. Possible values are: none, swapRequest, offerShiftRequest, unknownFutureValue, timeOffReason. Use the Prefer: include-unknown-enum-members request header to get the following
+    # Support to view eligibility-filtered results. Possible values are: none, swapRequest, offerShiftRequest, unknownFutureValue, timeOffReason. Use the Prefer: include-unknown-enum-members request header to get the following members in this evolvable enum: timeOffReason.
     eligibility_filtering_enabled_entities: Optional[EligibilityFilteringEnabledEntities] = None
     # The workforce integration encryption resource.
     encryption: Optional[WorkforceIntegrationEncryption] = None
msgraph/generated/models/workspace.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
+from typing import Any, Optional, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from .place import Place
+    from .place_mode import PlaceMode
+
+from .place import Place
+
+@dataclass
+class Workspace(Place, Parsable):
+    # The OdataType property
+    odata_type: Optional[str] = "#microsoft.graph.workspace"
+    # The capacity property
+    capacity: Optional[int] = None
+    # The displayDeviceName property
+    display_device_name: Optional[str] = None
+    # The emailAddress property
+    email_address: Optional[str] = None
+    # The mode property
+    mode: Optional[PlaceMode] = None
+    # The nickname property
+    nickname: Optional[str] = None
+
+    @staticmethod
+    def create_from_discriminator_value(parse_node: ParseNode) -> Workspace:
+        """
+        Creates a new instance of the appropriate class based on discriminator value
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: Workspace
+        """
+        if parse_node is None:
+            raise TypeError("parse_node cannot be null.")
+        return Workspace()
+
+    def get_field_deserializers(self,) -> dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model
+        Returns: dict[str, Callable[[ParseNode], None]]
+        """
+        from .place import Place
+        from .place_mode import PlaceMode
+
+        from .place import Place
+        from .place_mode import PlaceMode
+
+        fields: dict[str, Callable[[Any], None]] = {
+            "capacity": lambda n : setattr(self, 'capacity', n.get_int_value()),
+            "displayDeviceName": lambda n : setattr(self, 'display_device_name', n.get_str_value()),
+            "emailAddress": lambda n : setattr(self, 'email_address', n.get_str_value()),
+            "mode": lambda n : setattr(self, 'mode', n.get_object_value(PlaceMode)),
+            "nickname": lambda n : setattr(self, 'nickname', n.get_str_value()),
+        }
+        super_fields = super().get_field_deserializers()
+        fields.update(super_fields)
+        return fields
+
+    def serialize(self,writer: SerializationWriter) -> None:
+        """
+        Serializes information the current object
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if writer is None:
+            raise TypeError("writer cannot be null.")
+        super().serialize(writer)
+        writer.write_int_value("capacity", self.capacity)
+        writer.write_str_value("displayDeviceName", self.display_device_name)
+        writer.write_str_value("emailAddress", self.email_address)
+        writer.write_object_value("mode", self.mode)
+        writer.write_str_value("nickname", self.nickname)
+
+