anaplan-sdk 0.4.5__py3-none-any.whl → 0.5.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,392 @@
1
+ import asyncio
2
+ import logging
3
+ import random
4
+ import time
5
+ from asyncio import gather, sleep
6
+ from concurrent.futures import ThreadPoolExecutor
7
+ from gzip import compress
8
+ from itertools import chain
9
+ from math import ceil
10
+ from typing import Any, Awaitable, Callable, Coroutine, Iterator, Literal, Type, TypeVar
11
+
12
+ import httpx
13
+ from httpx import HTTPError, Response
14
+
15
+ from .exceptions import AnaplanException, AnaplanTimeoutException, InvalidIdentifierException
16
+ from .models import (
17
+ AnaplanModel,
18
+ InsertionResult,
19
+ ModelCalendar,
20
+ MonthsQuartersYearsCalendar,
21
+ TaskSummary,
22
+ WeeksGeneralCalendar,
23
+ WeeksGroupingCalendar,
24
+ WeeksPeriodsCalendar,
25
+ )
26
+ from .models.cloud_works import (
27
+ AmazonS3ConnectionInput,
28
+ AzureBlobConnectionInput,
29
+ ConnectionBody,
30
+ GoogleBigQueryConnectionInput,
31
+ IntegrationInput,
32
+ IntegrationProcessInput,
33
+ ScheduleInput,
34
+ )
35
+
36
# Shared logger for the whole SDK; configure handlers/levels via the "anaplan_sdk" name.
logger = logging.getLogger("anaplan_sdk")

# Default content-type headers for JSON request bodies and gzip-compressed uploads.
_json_header = {"Content-Type": "application/json"}
_gzip_header = {"Content-Type": "application/x-gzip"}

# T: any Anaplan pydantic model. Task: any task-summary model (used by the pollers).
T = TypeVar("T", bound=AnaplanModel)
Task = TypeVar("Task", bound=TaskSummary)
43
+
44
+
45
+ class _HttpService:
46
+ def __init__(self, client: httpx.Client, retry_count: int, page_size: int, poll_delay: int):
47
+ self._client = client
48
+ self._retry_count = retry_count
49
+ self._poll_delay = poll_delay
50
+ self._page_size = min(page_size, 5_000)
51
+
52
+ def get(self, url: str, **kwargs) -> dict[str, Any]:
53
+ return self.__run_with_retry(self._client.get, url, **kwargs).json()
54
+
55
+ def get_binary(self, url: str) -> bytes:
56
+ return self.__run_with_retry(self._client.get, url).content
57
+
58
+ def post(self, url: str, json: dict | list) -> dict[str, Any]:
59
+ return self.__run_with_retry(self._client.post, url, headers=_json_header, json=json).json()
60
+
61
+ def put(self, url: str, json: dict | list) -> dict[str, Any]:
62
+ res = self.__run_with_retry(self._client.put, url, headers=_json_header, json=json)
63
+ return res.json() if res.num_bytes_downloaded > 0 else {}
64
+
65
+ def patch(self, url: str, json: dict | list) -> dict[str, Any]:
66
+ return (
67
+ self.__run_with_retry(self._client.patch, url, headers=_json_header, json=json)
68
+ ).json()
69
+
70
+ def delete(self, url: str) -> dict[str, Any]:
71
+ return (self.__run_with_retry(self._client.delete, url, headers=_json_header)).json()
72
+
73
+ def post_empty(self, url: str, **kwargs) -> dict[str, Any]:
74
+ res = self.__run_with_retry(self._client.post, url, **kwargs)
75
+ return res.json() if res.num_bytes_downloaded > 0 else {}
76
+
77
+ def put_binary_gzip(self, url: str, content: str | bytes) -> Response:
78
+ content = compress(content.encode() if isinstance(content, str) else content)
79
+ return self.__run_with_retry(self._client.put, url, headers=_gzip_header, content=content)
80
+
81
+ def get_paginated(
82
+ self, url: str, result_key: str, page_size: int = 5_000, **kwargs
83
+ ) -> Iterator[dict[str, Any]]:
84
+ logger.debug(f"Starting paginated fetch from {url} with page_size={page_size}.")
85
+ first_page, total_items = self._get_first_page(url, page_size, result_key, **kwargs)
86
+ if total_items <= page_size:
87
+ logger.debug("All items fit in first page, no additional requests needed.")
88
+ return iter(first_page)
89
+
90
+ pages_needed = ceil(total_items / page_size)
91
+ logger.debug(f"Fetching {pages_needed - 1} additional pages with {page_size} items each.")
92
+ with ThreadPoolExecutor() as executor:
93
+ pages = executor.map(
94
+ lambda n: self._get_page(url, page_size, n * page_size, result_key, **kwargs),
95
+ range(1, pages_needed),
96
+ )
97
+ logger.debug(f"Completed paginated fetch of {total_items} total items.")
98
+ return chain(first_page, *pages)
99
+
100
+ def poll_task(self, func: Callable[..., Task], *args) -> Task:
101
+ while (result := func(*args)).task_state != "COMPLETE":
102
+ time.sleep(self._poll_delay)
103
+ return result
104
+
105
+ def _get_page(self, url: str, limit: int, offset: int, result_key: str, **kwargs) -> list:
106
+ logger.debug(f"Fetching page: offset={offset}, limit={limit} from {url}.")
107
+ kwargs["params"] = kwargs.get("params") or {} | {"limit": limit, "offset": offset}
108
+ return self.get(url, **kwargs).get(result_key, [])
109
+
110
+ def _get_first_page(self, url: str, limit: int, result_key: str, **kwargs) -> tuple[list, int]:
111
+ logger.debug(f"Fetching first page with limit={limit} from {url}.")
112
+ kwargs["params"] = kwargs.get("params") or {} | {"limit": limit}
113
+ res = self.get(url, **kwargs)
114
+ total_items, first_page = res["meta"]["paging"]["totalSize"], res.get(result_key, [])
115
+ logger.debug(f"Found {total_items} total items, retrieved {len(first_page)} in first page.")
116
+ return first_page, total_items
117
+
118
+ def __run_with_retry(self, func: Callable[..., Response], *args, **kwargs) -> Response:
119
+ for i in range(max(self._retry_count, 1)):
120
+ try:
121
+ response = func(*args, **kwargs)
122
+ if response.status_code == 429:
123
+ if i >= self._retry_count - 1:
124
+ raise AnaplanException("Rate limit exceeded.")
125
+ backoff_time = max(i, 1) * random.randint(2, 5)
126
+ logger.warning(f"Rate limited. Retrying in {backoff_time} seconds.")
127
+ time.sleep(backoff_time)
128
+ continue
129
+ response.raise_for_status()
130
+ return response
131
+ except HTTPError as error:
132
+ if i >= self._retry_count - 1:
133
+ raise_error(error)
134
+ url = args[0] or kwargs.get("url")
135
+ logger.info(f"Retrying for: {url}")
136
+
137
+ raise AnaplanException("Exhausted all retries without a successful response or Error.")
138
+
139
+
140
+ class _AsyncHttpService:
141
+ def __init__(
142
+ self, client: httpx.AsyncClient, retry_count: int, page_size: int, poll_delay: int
143
+ ):
144
+ self._client = client
145
+ self._retry_count = retry_count
146
+ self._poll_delay = poll_delay
147
+ self._page_size = min(page_size, 5_000)
148
+
149
+ async def get(self, url: str, **kwargs) -> dict[str, Any]:
150
+ return (await self._run_with_retry(self._client.get, url, **kwargs)).json()
151
+
152
+ async def get_binary(self, url: str) -> bytes:
153
+ return (await self._run_with_retry(self._client.get, url)).content
154
+
155
+ async def post(self, url: str, json: dict | list) -> dict[str, Any]:
156
+ return (
157
+ await self._run_with_retry(self._client.post, url, headers=_json_header, json=json)
158
+ ).json()
159
+
160
+ async def put(self, url: str, json: dict | list) -> dict[str, Any]:
161
+ res = await self._run_with_retry(self._client.put, url, headers=_json_header, json=json)
162
+ return res.json() if res.num_bytes_downloaded > 0 else {}
163
+
164
+ async def patch(self, url: str, json: dict | list) -> dict[str, Any]:
165
+ return (
166
+ await self._run_with_retry(self._client.patch, url, headers=_json_header, json=json)
167
+ ).json()
168
+
169
+ async def delete(self, url: str) -> dict[str, Any]:
170
+ return (await self._run_with_retry(self._client.delete, url, headers=_json_header)).json()
171
+
172
+ async def post_empty(self, url: str, **kwargs) -> dict[str, Any]:
173
+ res = await self._run_with_retry(self._client.post, url, **kwargs)
174
+ return res.json() if res.num_bytes_downloaded > 0 else {}
175
+
176
+ async def put_binary_gzip(self, url: str, content: str | bytes) -> Response:
177
+ content = compress(content.encode() if isinstance(content, str) else content)
178
+ return await self._run_with_retry(
179
+ self._client.put, url, headers=_gzip_header, content=content
180
+ )
181
+
182
+ async def get_paginated(
183
+ self, url: str, result_key: str, page_size: int = 5_000, **kwargs
184
+ ) -> Iterator[dict[str, Any]]:
185
+ logger.debug(f"Starting paginated fetch from {url} with page_size={page_size}.")
186
+ first_page, total_items = await self._get_first_page(url, page_size, result_key, **kwargs)
187
+ if total_items <= page_size:
188
+ logger.debug("All items fit in first page, no additional requests needed.")
189
+ return iter(first_page)
190
+ pages = await gather(
191
+ *(
192
+ self._get_page(url, page_size, n * page_size, result_key, **kwargs)
193
+ for n in range(1, ceil(total_items / page_size))
194
+ )
195
+ )
196
+ logger.debug(f"Completed paginated fetch of {total_items} total items.")
197
+ return chain(first_page, *pages)
198
+
199
+ async def poll_task(self, func: Callable[..., Awaitable[Task]], *args) -> Task:
200
+ while (result := await func(*args)).task_state != "COMPLETE":
201
+ await sleep(self._poll_delay)
202
+ return result
203
+
204
+ async def _get_page(self, url: str, limit: int, offset: int, result_key: str, **kwargs) -> list:
205
+ logger.debug(f"Fetching page: offset={offset}, limit={limit} from {url}.")
206
+ kwargs["params"] = kwargs.get("params") or {} | {"limit": limit, "offset": offset}
207
+ return (await self.get(url, **kwargs)).get(result_key, [])
208
+
209
+ async def _get_first_page(
210
+ self, url: str, limit: int, result_key: str, **kwargs
211
+ ) -> tuple[list, int]:
212
+ logger.debug(f"Fetching first page with limit={limit} from {url}.")
213
+ kwargs["params"] = kwargs.get("params") or {} | {"limit": limit}
214
+ res = await self.get(url, **kwargs)
215
+ total_items, first_page = res["meta"]["paging"]["totalSize"], res.get(result_key, [])
216
+ logger.debug(f"Found {total_items} total items, retrieved {len(first_page)} in first page.")
217
+ return first_page, total_items
218
+
219
+ async def _run_with_retry(
220
+ self, func: Callable[..., Coroutine[Any, Any, Response]], *args, **kwargs
221
+ ) -> Response:
222
+ for i in range(max(self._retry_count, 1)):
223
+ try:
224
+ response = await func(*args, **kwargs)
225
+ if response.status_code == 429:
226
+ if i >= self._retry_count - 1:
227
+ raise AnaplanException("Rate limit exceeded.")
228
+ backoff_time = (i + 1) * random.randint(3, 5)
229
+ logger.warning(f"Rate limited. Retrying in {backoff_time} seconds.")
230
+ await asyncio.sleep(backoff_time)
231
+ continue
232
+ response.raise_for_status()
233
+ return response
234
+ except HTTPError as error:
235
+ if i >= self._retry_count - 1:
236
+ raise_error(error)
237
+ url = args[0] or kwargs.get("url")
238
+ logger.info(f"Retrying for: {url}")
239
+
240
+ raise AnaplanException("Exhausted all retries without a successful response or Error.")
241
+
242
+
243
def construct_payload(model: Type[T], body: T | dict[str, Any]) -> dict[str, Any]:
    """
    Validate `body` against `model` and serialize it to a plain dictionary.
    :param model: The model class to use for validation.
    :param body: Either an already-validated model instance or a raw dictionary.
    :return: A dictionary representation of the validated body.
    """
    validated = model.model_validate(body) if isinstance(body, dict) else body
    return validated.model_dump(exclude_none=True, by_alias=True)
253
+
254
+
255
def connection_body_payload(body: ConnectionBody | dict[str, Any]) -> dict[str, Any]:
    """
    Construct a payload for the given integration body.
    :param body: The body to validate and optionally convert to a dictionary.
    :return: A dictionary representation of the validated body.
    """
    if isinstance(body, dict):
        # The connection flavor is inferred from its distinguishing credential field.
        if "sasToken" in body:
            model = AzureBlobConnectionInput
        elif "secretAccessKey" in body:
            model = AmazonS3ConnectionInput
        else:
            model = GoogleBigQueryConnectionInput
        body = model.model_validate(body)
    return body.model_dump(exclude_none=True, by_alias=True)
269
+
270
+
271
def integration_payload(
    body: IntegrationInput | IntegrationProcessInput | dict[str, Any],
) -> dict[str, Any]:
    """
    Construct a payload for the given integration body.
    :param body: The body to validate and optionally convert to a dictionary.
    :return: A dictionary representation of the validated body.
    """
    if isinstance(body, dict):
        # A "jobs" key marks a plain integration; anything else is a process integration.
        model = IntegrationInput if "jobs" in body else IntegrationProcessInput
        body = model.model_validate(body)
    return body.model_dump(exclude_none=True, by_alias=True)
286
+
287
+
288
def schedule_payload(
    integration_id: str, schedule: ScheduleInput | dict[str, Any]
) -> dict[str, Any]:
    """
    Construct a payload for the given integration ID and schedule.
    :param integration_id: The ID of the integration.
    :param schedule: The schedule to validate and optionally convert to a dictionary.
    :return: A dictionary representation of the validated schedule.
    """
    validated = ScheduleInput.model_validate(schedule) if isinstance(schedule, dict) else schedule
    payload: dict[str, Any] = {"integrationId": integration_id}
    payload["schedule"] = validated.model_dump(exclude_none=True, by_alias=True)
    return payload
303
+
304
+
305
def action_url(action_id: int) -> Literal["imports", "exports", "actions", "processes"]:
    """
    Determine the type of action based on its identifier. Anaplan action
    identifiers are namespaced by their leading digits: imports 112…,
    exports 116…, actions 117…, processes 118….
    :param action_id: The identifier of the action.
    :return: The type of action.
    :raises InvalidIdentifierException: If the identifier is outside all known ranges.
    """
    # Fixed lower bound: was 12_000_000_000 (a dropped leading digit), which
    # wrongly classified any identifier below the import range as an import.
    if 112_000_000_000 <= action_id < 113_000_000_000:
        return "imports"
    if 116_000_000_000 <= action_id < 117_000_000_000:
        return "exports"
    if 117_000_000_000 <= action_id < 118_000_000_000:
        return "actions"
    if 118_000_000_000 <= action_id < 119_000_000_000:
        return "processes"
    raise InvalidIdentifierException(f"Action '{action_id}' is not a valid identifier.")
320
+
321
+
322
def raise_error(error: HTTPError) -> None:
    """
    Raise an appropriate exception based on the error.
    :param error: The error to raise an exception for.
    :raises AnaplanTimeoutException: For httpx timeouts.
    :raises InvalidIdentifierException: For 404 responses.
    :raises AnaplanException: For any other error.
    """
    if isinstance(error, httpx.TimeoutException):
        raise AnaplanTimeoutException from error
    if not isinstance(error, httpx.HTTPStatusError):
        # Transport-level or protocol error without a response to inspect.
        logger.error(f"Error: {error}")
        raise AnaplanException from error
    if error.response.status_code == 404:
        raise InvalidIdentifierException from error
    logger.error(f"Anaplan Error: [{error.response.status_code}]: {error.response.text}")
    raise AnaplanException(error.response.text) from error
337
+
338
+
339
def parse_calendar_response(data: dict) -> ModelCalendar:
    """
    Parse calendar response and return appropriate calendar model.
    :param data: The calendar data from the API response.
    :return: The calendar settings of the model based on calendar type.
    """
    calendar_data = data["modelCalendar"]
    # Dispatch on the API's calendar-type label; keys must match the API verbatim.
    models = {
        "Calendar Months/Quarters/Years": MonthsQuartersYearsCalendar,
        "Weeks: 4-4-5, 4-5-4 or 5-4-4": WeeksGroupingCalendar,
        "Weeks: General": WeeksGeneralCalendar,
        "Weeks: 13 4-week Periods": WeeksPeriodsCalendar,
    }
    model = models.get(calendar_data["calendarType"])
    if model is None:
        raise AnaplanException(
            "Unknown calendar type encountered. Please report this issue: "
            "https://github.com/VinzenzKlass/anaplan-sdk/issues/new"
        )
    return model.model_validate(calendar_data)
359
+
360
+
361
def parse_insertion_response(data: list[dict]) -> InsertionResult:
    """
    Aggregate several chunked insertion responses into one combined result.
    :param data: The raw per-chunk response dictionaries.
    :return: Combined counts and the concatenated list of failures.
    """
    added = sum(chunk.get("added", 0) for chunk in data)
    ignored = sum(chunk.get("ignored", 0) for chunk in data)
    total = sum(chunk.get("total", 0) for chunk in data)
    failures = list(chain.from_iterable(chunk.get("failures", []) for chunk in data))
    return InsertionResult(added=added, ignored=ignored, total=total, failures=failures)
371
+
372
+
373
def validate_dimension_id(dimension_id: int) -> int:
    """
    Check that `dimension_id` denotes a list, list subset, line item subset or
    the Users dimension, warning where a dedicated accessor would serve better.
    :param dimension_id: The identifier to validate.
    :return: The unchanged `dimension_id` when it is valid.
    :raises InvalidIdentifierException: If the identifier is outside all valid ranges.
    """
    is_users = dimension_id == 101999999999
    is_list = 101_000_000_000 <= dimension_id < 102_000_000_000
    is_list_subset = 109_000_000_000 <= dimension_id < 110_000_000_000
    is_line_item_subset = 114_000_000_000 <= dimension_id < 115_000_000_000
    if not (is_users or is_list or is_list_subset or is_line_item_subset):
        raise InvalidIdentifierException(
            "Invalid dimension_id. Must be a List (101xxxxxxxxx), List Subset (109xxxxxxxxx), "
            "Line Item Subset (114xxxxxxxxx), or Users (101999999999)."
        )
    msg = (
        "Using `get_dimension_items` for {} is discouraged. "
        "Prefer `{}` for better performance and more details on the members."
    )
    # Note: the Users id also falls in the list range, so both warnings can fire.
    if is_users:
        logger.warning(msg.format("Users", "get_users"))
    if is_list:
        logger.warning(msg.format("Lists", "get_list_items"))
    return dimension_id
@@ -1,4 +1,12 @@
1
- from ._alm import ModelRevision, Revision, SyncTask
1
+ from ._alm import (
2
+ ModelRevision,
3
+ ReportTask,
4
+ ReportTaskResult,
5
+ Revision,
6
+ SummaryReport,
7
+ SyncTask,
8
+ SyncTaskResult,
9
+ )
2
10
  from ._base import AnaplanModel
3
11
  from ._bulk import (
4
12
  Action,
@@ -10,6 +18,7 @@ from ._bulk import (
10
18
  List,
11
19
  ListMetadata,
12
20
  Model,
21
+ ModelDeletionResult,
13
22
  Process,
14
23
  TaskResult,
15
24
  TaskResultDetail,
@@ -17,7 +26,27 @@ from ._bulk import (
17
26
  TaskSummary,
18
27
  Workspace,
19
28
  )
20
- from ._transactional import Failure, InsertionResult, LineItem, ListItem, ModelStatus, Module, User
29
+ from ._transactional import (
30
+ CurrentPeriod,
31
+ Dimension,
32
+ DimensionWithCode,
33
+ Failure,
34
+ FiscalYear,
35
+ InsertionResult,
36
+ LineItem,
37
+ ListDeletionResult,
38
+ ListItem,
39
+ ModelCalendar,
40
+ ModelStatus,
41
+ Module,
42
+ MonthsQuartersYearsCalendar,
43
+ User,
44
+ View,
45
+ ViewInfo,
46
+ WeeksGeneralCalendar,
47
+ WeeksGroupingCalendar,
48
+ WeeksPeriodsCalendar,
49
+ )
21
50
 
22
51
  __all__ = [
23
52
  "AnaplanModel",
@@ -41,9 +70,27 @@ __all__ = [
41
70
  "TaskResult",
42
71
  "TaskResultDetail",
43
72
  "TaskStatus",
73
+ "TaskSummary",
74
+ "SyncTaskResult",
75
+ "ReportTask",
76
+ "ReportTaskResult",
77
+ "SummaryReport",
44
78
  "SyncTask",
45
79
  "User",
46
80
  "Failure",
47
81
  "InsertionResult",
48
82
  "Revision",
83
+ "CurrentPeriod",
84
+ "FiscalYear",
85
+ "MonthsQuartersYearsCalendar",
86
+ "WeeksGeneralCalendar",
87
+ "WeeksGroupingCalendar",
88
+ "WeeksPeriodsCalendar",
89
+ "Dimension",
90
+ "View",
91
+ "ViewInfo",
92
+ "ModelCalendar",
93
+ "ModelDeletionResult",
94
+ "DimensionWithCode",
95
+ "ListDeletionResult",
49
96
  ]
@@ -1,6 +1,7 @@
1
1
  from pydantic import Field
2
2
 
3
3
  from ._base import AnaplanModel
4
+ from ._bulk import TaskSummary
4
5
 
5
6
 
6
7
  class Revision(AnaplanModel):
@@ -34,8 +35,8 @@ class ModelRevision(AnaplanModel):
34
35
  "workspace."
35
36
  ),
36
37
  )
37
- workspace_id: str = Field(
38
- description="The unique identifier of the workspace this revision belongs to."
38
+ workspace_id: str | None = Field(
39
+ None, description="The unique identifier of the workspace this revision belongs to."
39
40
  )
40
41
  applied_by: str = Field(
41
42
  description="The unique identifier of the user who applied this revision."
@@ -49,7 +50,64 @@ class ModelRevision(AnaplanModel):
49
50
  )
50
51
 
51
52
 
52
- class SyncTask(AnaplanModel):
53
- id: str = Field(validation_alias="taskId", description="The unique identifier of this task.")
54
- task_state: str = Field(description="The state of this task.")
55
- creation_time: int = Field(description="The creation time of this task.")
53
# Outcome payload of a finished ALM revision-sync task.
class SyncTaskResult(AnaplanModel):
    source_revision_id: str = Field(description="The ID of the source revision.")
    target_revision_id: str = Field(description="The ID of the target revision.")
    successful: bool = Field(description="Whether the sync task was successful or not.")
57
+
58
+
59
# ALM sync task; extends the generic TaskSummary with step and result details.
class SyncTask(TaskSummary):
    current_step: str = Field(description="The current step of the sync task.")
    # The result is absent while the task is still running.
    result: SyncTaskResult | None = Field(None, description="The result of the sync task.")
62
+
63
+
64
# Successful comparison-report result; adds the generated report's download URL.
class ReportTaskResult(SyncTaskResult):
    report_file_url: str = Field(
        description="The URL of the report file generated by the sync task."
    )
68
+
69
+
70
# Error details surfaced by a failed comparison-report task.
class ReportTaskError(AnaplanModel):
    title: str = Field(description="The title of the error.")
    # The API transmits this field as "messageText".
    message: str = Field(validation_alias="messageText", description="The message of the error.")
73
+
74
+
75
# Failure payload of a comparison-report task.
class ReportTaskFailureResult(AnaplanModel):
    successful: bool = Field(description="Whether the sync task was successful or not.")
    error: ReportTaskError = Field(description="The error that occurred during the sync task.")
78
+
79
+
80
# Comparison-report task; the result narrows to a success or failure payload once finished.
class ReportTask(SyncTask):
    result: ReportTaskResult | ReportTaskFailureResult | None = Field(
        None, description="The result of the comparison report task, including the report file URL."
    )
84
+
85
+
86
# Per-category change counts in an ALM summary report; all counts default to zero.
class SummaryTotals(AnaplanModel):
    modified: int = Field(0, description="The number of modified items.")
    deleted: int = Field(0, description="The number of deleted items.")
    created: int = Field(0, description="The number of created items.")
90
+
91
+
92
# Breakdown of summary-report changes by model area, defaulting each area to zero
# counts. NOTE(review): the defaults are model instances — pydantic is presumed to
# copy field defaults per instance; confirm against the pinned pydantic version.
class SummaryDifferences(AnaplanModel):
    line_items: SummaryTotals = Field(
        SummaryTotals(modified=0, deleted=0, created=0), description="Changes in line items."
    )
    roles_contents: SummaryTotals = Field(
        SummaryTotals(modified=0, deleted=0, created=0), description="Changes in roles contents."
    )
    lists: SummaryTotals = Field(
        SummaryTotals(modified=0, deleted=0, created=0), description="Changes in lists."
    )
    modules: SummaryTotals = Field(
        SummaryTotals(modified=0, deleted=0, created=0), description="Changes in modules."
    )
105
+
106
+
107
+ class SummaryReport(AnaplanModel):
108
+ target_revision_id: str = Field(description="The ID of the target revision.")
109
+ source_revision_id: str = Field(description="The ID of the source revision.")
110
+ totals: SummaryTotals = Field(description="The total counts of changes.")
111
+ differences: SummaryDifferences = Field(
112
+ description="The detailed breakdown of changes by category."
113
+ )
@@ -1,7 +1,6 @@
1
1
  from typing import Literal, TypeAlias
2
2
 
3
- from pydantic import ConfigDict, Field, field_validator
4
- from pydantic.alias_generators import to_camel
3
+ from pydantic import Field, field_validator
5
4
 
6
5
  from ._base import AnaplanModel
7
6
 
@@ -164,13 +163,21 @@ class TaskResult(AnaplanModel):
164
163
  )
165
164
 
166
165
 
167
- class TaskStatus(AnaplanModel):
168
- model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)
169
- id: str = Field(validation_alias="taskId", description="The unique identifier of this task.")
170
- task_state: Literal["NOT_STARTED", "IN_PROGRESS", "COMPLETE"] = Field(
171
- description="The state of this task."
172
- )
173
- creation_time: int = Field(description="Unix timestamp of when this task was created.")
166
# Live status of a running bulk task, extending the shared TaskSummary fields.
class TaskStatus(TaskSummary):
    progress: float = Field(description="The progress of this task as a float between 0 and 1.")
    current_step: str | None = Field(None, description="The current step of this task.")
    result: TaskResult | None = Field(None)  # Populated once the task has finished.
170
+
171
+
172
+ class DeletionFailure(AnaplanModel):
173
+ model_id: str = Field(description="The unique identifier of the model that failed to delete.")
174
+ message: str = Field(description="The error message explaining why the deletion failed.")
175
+
176
+
177
+ class ModelDeletionResult(AnaplanModel):
178
+ models_deleted: int = Field(description="The number of models that were successfully deleted.")
179
+ failures: list[DeletionFailure] = Field(
180
+ [],
181
+ validation_alias="bulkDeleteModelsFailures",
182
+ description="List of models that failed to delete with their error messages.",
183
+ )