anaplan-sdk 0.4.4a4__py3-none-any.whl → 0.5.0__py3-none-any.whl
- anaplan_sdk/__init__.py +2 -0
- anaplan_sdk/_async_clients/__init__.py +4 -0
- anaplan_sdk/_async_clients/_alm.py +257 -44
- anaplan_sdk/_async_clients/_audit.py +31 -21
- anaplan_sdk/_async_clients/_bulk.py +276 -168
- anaplan_sdk/_async_clients/_cloud_works.py +61 -41
- anaplan_sdk/_async_clients/_cw_flow.py +26 -18
- anaplan_sdk/_async_clients/_scim.py +148 -0
- anaplan_sdk/_async_clients/_transactional.py +265 -56
- anaplan_sdk/_auth.py +5 -4
- anaplan_sdk/_clients/__init__.py +12 -1
- anaplan_sdk/_clients/_alm.py +255 -45
- anaplan_sdk/_clients/_audit.py +32 -22
- anaplan_sdk/_clients/_bulk.py +271 -152
- anaplan_sdk/_clients/_cloud_works.py +59 -40
- anaplan_sdk/_clients/_cw_flow.py +24 -16
- anaplan_sdk/_clients/_scim.py +145 -0
- anaplan_sdk/_clients/_transactional.py +260 -50
- anaplan_sdk/_services.py +277 -0
- anaplan_sdk/_utils.py +188 -0
- anaplan_sdk/models/__init__.py +49 -2
- anaplan_sdk/models/_alm.py +64 -6
- anaplan_sdk/models/_bulk.py +22 -13
- anaplan_sdk/models/_transactional.py +221 -4
- anaplan_sdk/models/cloud_works.py +6 -2
- anaplan_sdk/models/scim.py +282 -0
- {anaplan_sdk-0.4.4a4.dist-info → anaplan_sdk-0.5.0.dist-info}/METADATA +4 -3
- anaplan_sdk-0.5.0.dist-info/RECORD +34 -0
- anaplan_sdk/_base.py +0 -297
- anaplan_sdk-0.4.4a4.dist-info/RECORD +0 -30
- {anaplan_sdk-0.4.4a4.dist-info → anaplan_sdk-0.5.0.dist-info}/WHEEL +0 -0
- {anaplan_sdk-0.4.4a4.dist-info → anaplan_sdk-0.5.0.dist-info}/licenses/LICENSE +0 -0
anaplan_sdk/_services.py
ADDED
```python
import asyncio
import logging
import time
from asyncio import gather, sleep
from concurrent.futures import ThreadPoolExecutor
from gzip import compress
from itertools import chain
from math import ceil
from typing import Any, Awaitable, Callable, Coroutine, Iterator, TypeVar

import httpx
from httpx import HTTPError, Response

from .exceptions import AnaplanException, AnaplanTimeoutException, InvalidIdentifierException
from .models import TaskSummary

SORT_WARNING = (
    "If you are sorting by a field that is potentially ambiguous (e.g., name), the order of "
    "results is not guaranteed to be internally consistent across multiple requests. This will "
    "lead to wrong results when paginating through result sets where the ambiguous order can cause "
    "records to slip between pages or be duplicated on multiple pages. The only way to ensure "
    "correct results when sorting is to make sure the entire result set fits in one page, or to "
    "sort by a field that is guaranteed to be unique (e.g., id)."
)

logger = logging.getLogger("anaplan_sdk")

_json_header = {"Content-Type": "application/json"}
_gzip_header = {"Content-Type": "application/x-gzip"}

Task = TypeVar("Task", bound=TaskSummary)


class _HttpService:
    def __init__(
        self,
        client: httpx.Client,
        *,
        retry_count: int,
        backoff: float,
        backoff_factor: float,
        page_size: int,
        poll_delay: int,
    ):
        logger.debug(
            f"Initializing HttpService with retry_count={retry_count}, "
            f"page_size={page_size}, poll_delay={poll_delay}."
        )
        self._client = client
        self._retry_count = retry_count
        self._backoff = backoff
        self._backoff_factor = backoff_factor
        self._poll_delay = poll_delay
        self._page_size = min(page_size, 5_000)

    def get(self, url: str, **kwargs) -> dict[str, Any]:
        return self.__run_with_retry(self._client.get, url, **kwargs).json()

    def get_binary(self, url: str) -> bytes:
        return self.__run_with_retry(self._client.get, url).content

    def post(self, url: str, json: dict | list) -> dict[str, Any]:
        return self.__run_with_retry(self._client.post, url, headers=_json_header, json=json).json()

    def put(self, url: str, json: dict | list) -> dict[str, Any]:
        res = self.__run_with_retry(self._client.put, url, headers=_json_header, json=json)
        return res.json() if res.num_bytes_downloaded > 0 else {}

    def patch(self, url: str, json: dict | list) -> dict[str, Any]:
        return (
            self.__run_with_retry(self._client.patch, url, headers=_json_header, json=json)
        ).json()

    def delete(self, url: str) -> dict[str, Any]:
        return (self.__run_with_retry(self._client.delete, url, headers=_json_header)).json()

    def post_empty(self, url: str, **kwargs) -> dict[str, Any]:
        res = self.__run_with_retry(self._client.post, url, **kwargs)
        return res.json() if res.num_bytes_downloaded > 0 else {}

    def put_binary_gzip(self, url: str, content: str | bytes) -> Response:
        content = compress(content.encode() if isinstance(content, str) else content)
        return self.__run_with_retry(self._client.put, url, headers=_gzip_header, content=content)

    def poll_task(self, func: Callable[..., Task], *args) -> Task:
        while (result := func(*args)).task_state != "COMPLETE":
            time.sleep(self._poll_delay)
        return result

    def get_paginated(self, url: str, result_key: str, **kwargs) -> Iterator[dict[str, Any]]:
        logger.debug(f"Starting paginated fetch from {url} with page_size={self._page_size}.")
        first_page, total_items, actual_size = self._get_first_page(url, result_key, **kwargs)
        if total_items <= self._page_size:
            logger.debug("All items fit in first page, no additional requests needed.")
            return iter(first_page)
        if kwargs and (kwargs.get("params") or {}).get("sort", None):
            logger.warning(SORT_WARNING)
        pages_needed = ceil(total_items / actual_size)
        logger.debug(f"Fetching {pages_needed - 1} additional pages with {actual_size} items each.")
        with ThreadPoolExecutor() as executor:
            pages = executor.map(
                lambda n: self._get_page(url, actual_size, n * actual_size, result_key, **kwargs),
                range(1, pages_needed),
            )
        logger.debug(f"Completed paginated fetch of {total_items} total items.")
        return chain(first_page, *pages)

    def _get_page(self, url: str, limit: int, offset: int, result_key: str, **kwargs) -> list:
        logger.debug(f"Fetching page: offset={offset}, limit={limit} from {url}.")
        kwargs["params"] = (kwargs.get("params") or {}) | {"limit": limit, "offset": offset}
        return self.get(url, **kwargs).get(result_key, [])

    def _get_first_page(self, url: str, result_key: str, **kwargs) -> tuple[list, int, int]:
        logger.debug(f"Fetching first page with limit={self._page_size} from {url}.")
        kwargs["params"] = (kwargs.get("params") or {}) | {"limit": self._page_size}
        res = self.get(url, **kwargs)
        return _extract_first_page(res, result_key, self._page_size)

    def __run_with_retry(self, func: Callable[..., Response], *args, **kwargs) -> Response:
        for i in range(max(self._retry_count, 1)):
            try:
                response = func(*args, **kwargs)
                if response.status_code == 429:
                    if i >= self._retry_count - 1:
                        raise AnaplanException("Rate limit exceeded.")
                    backoff_time = self._backoff * (self._backoff_factor if i > 0 else 1)
                    logger.warning(f"Rate limited. Retrying in {backoff_time} seconds.")
                    time.sleep(backoff_time)
                    continue
                response.raise_for_status()
                return response
            except HTTPError as error:
                if i >= self._retry_count - 1:
                    _raise_error(error)
                url = args[0] or kwargs.get("url")
                logger.info(f"Retrying for: {url}")

        raise AnaplanException("Exhausted all retries without a successful response or Error.")


class _AsyncHttpService:
    def __init__(
        self,
        client: httpx.AsyncClient,
        *,
        retry_count: int,
        backoff: float,
        backoff_factor: float,
        page_size: int,
        poll_delay: int,
    ):
        logger.debug(
            f"Initializing AsyncHttpService with retry_count={retry_count}, "
            f"page_size={page_size}, poll_delay={poll_delay}."
        )
        self._client = client
        self._retry_count = retry_count
        self._backoff = backoff
        self._backoff_factor = backoff_factor
        self._poll_delay = poll_delay
        self._page_size = min(page_size, 5_000)

    async def get(self, url: str, **kwargs) -> dict[str, Any]:
        return (await self._run_with_retry(self._client.get, url, **kwargs)).json()

    async def get_binary(self, url: str) -> bytes:
        return (await self._run_with_retry(self._client.get, url)).content

    async def post(self, url: str, json: dict | list) -> dict[str, Any]:
        return (
            await self._run_with_retry(self._client.post, url, headers=_json_header, json=json)
        ).json()

    async def put(self, url: str, json: dict | list) -> dict[str, Any]:
        res = await self._run_with_retry(self._client.put, url, headers=_json_header, json=json)
        return res.json() if res.num_bytes_downloaded > 0 else {}

    async def patch(self, url: str, json: dict | list) -> dict[str, Any]:
        return (
            await self._run_with_retry(self._client.patch, url, headers=_json_header, json=json)
        ).json()

    async def delete(self, url: str) -> dict[str, Any]:
        return (await self._run_with_retry(self._client.delete, url, headers=_json_header)).json()

    async def post_empty(self, url: str, **kwargs) -> dict[str, Any]:
        res = await self._run_with_retry(self._client.post, url, **kwargs)
        return res.json() if res.num_bytes_downloaded > 0 else {}

    async def put_binary_gzip(self, url: str, content: str | bytes) -> Response:
        content = compress(content.encode() if isinstance(content, str) else content)
        return await self._run_with_retry(
            self._client.put, url, headers=_gzip_header, content=content
        )

    async def poll_task(self, func: Callable[..., Awaitable[Task]], *args) -> Task:
        while (result := await func(*args)).task_state != "COMPLETE":
            await sleep(self._poll_delay)
        return result

    async def get_paginated(self, url: str, result_key: str, **kwargs) -> Iterator[dict[str, Any]]:
        logger.debug(f"Starting paginated fetch from {url} with page_size={self._page_size}.")
        first_page, total_items, actual_size = await self._get_first_page(url, result_key, **kwargs)
        if total_items <= self._page_size:
            logger.debug("All items fit in first page, no additional requests needed.")
            return iter(first_page)
        if kwargs and (kwargs.get("params") or {}).get("sort", None):
            logger.warning(SORT_WARNING)
        pages = await gather(
            *(
                self._get_page(url, actual_size, n * actual_size, result_key, **kwargs)
                for n in range(1, ceil(total_items / actual_size))
            )
        )
        logger.debug(f"Completed paginated fetch of {total_items} total items.")
        return chain(first_page, *pages)

    async def _get_page(self, url: str, limit: int, offset: int, result_key: str, **kwargs) -> list:
        logger.debug(f"Fetching page: offset={offset}, limit={limit} from {url}.")
        kwargs["params"] = (kwargs.get("params") or {}) | {"limit": limit, "offset": offset}
        return (await self.get(url, **kwargs)).get(result_key, [])

    async def _get_first_page(self, url: str, result_key: str, **kwargs) -> tuple[list, int, int]:
        logger.debug(f"Fetching first page with limit={self._page_size} from {url}.")
        kwargs["params"] = (kwargs.get("params") or {}) | {"limit": self._page_size}
        res = await self.get(url, **kwargs)
        return _extract_first_page(res, result_key, self._page_size)

    async def _run_with_retry(
        self, func: Callable[..., Coroutine[Any, Any, Response]], *args, **kwargs
    ) -> Response:
        for i in range(max(self._retry_count, 1)):
            try:
                response = await func(*args, **kwargs)
                if response.status_code == 429:
                    if i >= self._retry_count - 1:
                        raise AnaplanException("Rate limit exceeded.")
                    backoff_time = self._backoff * (self._backoff_factor if i > 0 else 1)
                    logger.warning(f"Rate limited. Retrying in {backoff_time} seconds.")
                    await asyncio.sleep(backoff_time)
                    continue
                response.raise_for_status()
                return response
            except HTTPError as error:
                if i >= self._retry_count - 1:
                    _raise_error(error)
                url = args[0] or kwargs.get("url")
                logger.info(f"Retrying for: {url}")

        raise AnaplanException("Exhausted all retries without a successful response or Error.")


def _extract_first_page(
    res: dict[str, Any], result_key: str, page_size: int
) -> tuple[list[dict[str, Any]], int, int]:
    total_items, first_page = res["meta"]["paging"]["totalSize"], res.get(result_key, [])
    actual_page_size = res["meta"]["paging"]["currentPageSize"]
    if actual_page_size < page_size and not actual_page_size == total_items:
        logger.warning(
            f"Page size {page_size} was silently truncated to {actual_page_size}."
            f"Using the server-side enforced page size {actual_page_size} for further requests."
        )
    logger.debug(f"Found {total_items} total items, retrieved {len(first_page)} in first page.")
    return first_page, total_items, actual_page_size


def _raise_error(error: HTTPError) -> None:
    if isinstance(error, httpx.TimeoutException):
        raise AnaplanTimeoutException from error
    if isinstance(error, httpx.HTTPStatusError):
        if error.response.status_code == 404:
            raise InvalidIdentifierException from error
        logger.error(f"Anaplan Error: [{error.response.status_code}]: {error.response.text}")
        raise AnaplanException(error.response.text) from error

    logger.error(f"Error: {error}")
    raise AnaplanException from error
```
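Reviewer note: the retry above is not exponential backoff. The first 429 waits `backoff` seconds and every later one waits `backoff * backoff_factor`, until `retry_count` attempts are exhausted. A minimal sketch of wiring the new service up, assuming only the constructor signature shown above (the SDK constructs this internally; the client and values here are illustrative):

```python
import httpx

from anaplan_sdk._services import _HttpService  # private module added in this release

# Illustrative wiring only, not how the SDK's public clients expose it.
http = _HttpService(
    httpx.Client(timeout=30.0),
    retry_count=3,       # total attempts, including the first request
    backoff=1.5,         # seconds slept after the first 429
    backoff_factor=2.0,  # all later 429s sleep backoff * backoff_factor
    page_size=10_000,    # silently capped to 5_000 in __init__
    poll_delay=1,        # seconds between poll_task status checks
)
workspaces = http.get("https://api.anaplan.com/2/0/workspaces")  # parsed JSON dict
```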
anaplan_sdk/_utils.py
ADDED
```python
from itertools import chain
from typing import Any, Literal, Type, TypeVar

from pydantic.alias_generators import to_camel

from anaplan_sdk._services import logger
from anaplan_sdk.exceptions import AnaplanException, InvalidIdentifierException
from anaplan_sdk.models import (
    AnaplanModel,
    InsertionResult,
    ModelCalendar,
    MonthsQuartersYearsCalendar,
    WeeksGeneralCalendar,
    WeeksGroupingCalendar,
    WeeksPeriodsCalendar,
)
from anaplan_sdk.models.cloud_works import (
    AmazonS3ConnectionInput,
    AzureBlobConnectionInput,
    ConnectionBody,
    GoogleBigQueryConnectionInput,
    IntegrationInput,
    IntegrationProcessInput,
    ScheduleInput,
)

T = TypeVar("T", bound=AnaplanModel)


def models_url(only_in_workspace: bool | str, workspace_id: str | None) -> str:
    if isinstance(only_in_workspace, bool) and only_in_workspace:
        if not workspace_id:
            raise ValueError(
                "Cannot list models in the current workspace, since no workspace Id was "
                "provided when instantiating the client. Either provide a workspace Id when "
                "instantiating the client, or pass a specific workspace Id to this method."
            )
        return f"https://api.anaplan.com/2/0/workspaces/{workspace_id}/models"
    if isinstance(only_in_workspace, str):
        return f"https://api.anaplan.com/2/0/workspaces/{only_in_workspace}/models"
    return "https://api.anaplan.com/2/0/models"


def sort_params(sort_by: str | None, descending: bool) -> dict[str, str | bool]:
    """
    Construct search parameters for sorting. This also converts snake_case to camelCase.
    :param sort_by: The field to sort by, optionally in snake_case.
    :param descending: Whether to sort in descending order.
    :return: A dictionary of search parameters in Anaplan's expected format.
    """
    if not sort_by:
        return {}
    return {"sort": f"{'-' if descending else '+'}{to_camel(sort_by)}"}


def construct_payload(model: Type[T], body: T | dict[str, Any]) -> dict[str, Any]:
    """
    Construct a payload for the given model and body.
    :param model: The model class to use for validation.
    :param body: The body to validate and optionally convert to a dictionary.
    :return: A dictionary representation of the validated body.
    """
    if isinstance(body, dict):
        body = model.model_validate(body)
    return body.model_dump(exclude_none=True, by_alias=True)


def connection_body_payload(body: ConnectionBody | dict[str, Any]) -> dict[str, Any]:
    """
    Construct a payload for the given integration body.
    :param body: The body to validate and optionally convert to a dictionary.
    :return: A dictionary representation of the validated body.
    """
    if isinstance(body, dict):
        if "sasToken" in body:
            body = AzureBlobConnectionInput.model_validate(body)
        elif "secretAccessKey" in body:
            body = AmazonS3ConnectionInput.model_validate(body)
        else:
            body = GoogleBigQueryConnectionInput.model_validate(body)
    return body.model_dump(exclude_none=True, by_alias=True)


def integration_payload(
    body: IntegrationInput | IntegrationProcessInput | dict[str, Any],
) -> dict[str, Any]:
    """
    Construct a payload for the given integration body.
    :param body: The body to validate and optionally convert to a dictionary.
    :return: A dictionary representation of the validated body.
    """
    if isinstance(body, dict):
        body = (
            IntegrationInput.model_validate(body)
            if "jobs" in body
            else IntegrationProcessInput.model_validate(body)
        )
    return body.model_dump(exclude_none=True, by_alias=True)


def schedule_payload(
    integration_id: str, schedule: ScheduleInput | dict[str, Any]
) -> dict[str, Any]:
    """
    Construct a payload for the given integration ID and schedule.
    :param integration_id: The ID of the integration.
    :param schedule: The schedule to validate and optionally convert to a dictionary.
    :return: A dictionary representation of the validated schedule.
    """
    if isinstance(schedule, dict):
        schedule = ScheduleInput.model_validate(schedule)
    return {
        "integrationId": integration_id,
        "schedule": schedule.model_dump(exclude_none=True, by_alias=True),
    }


def action_url(action_id: int) -> Literal["imports", "exports", "actions", "processes"]:
    """
    Determine the type of action based on its identifier.
    :param action_id: The identifier of the action.
    :return: The type of action.
    """
    if 12000000000 <= action_id < 113000000000:
        return "imports"
    if 116000000000 <= action_id < 117000000000:
        return "exports"
    if 117000000000 <= action_id < 118000000000:
        return "actions"
    if 118000000000 <= action_id < 119000000000:
        return "processes"
    raise InvalidIdentifierException(f"Action '{action_id}' is not a valid identifier.")


def parse_calendar_response(data: dict) -> ModelCalendar:
    """
    Parse calendar response and return appropriate calendar model.
    :param data: The calendar data from the API response.
    :return: The calendar settings of the model based on calendar type.
    """
    calendar_data = data["modelCalendar"]
    cal_type = calendar_data["calendarType"]
    if cal_type == "Calendar Months/Quarters/Years":
        return MonthsQuartersYearsCalendar.model_validate(calendar_data)
    if cal_type == "Weeks: 4-4-5, 4-5-4 or 5-4-4":
        return WeeksGroupingCalendar.model_validate(calendar_data)
    if cal_type == "Weeks: General":
        return WeeksGeneralCalendar.model_validate(calendar_data)
    if cal_type == "Weeks: 13 4-week Periods":
        return WeeksPeriodsCalendar.model_validate(calendar_data)
    raise AnaplanException(
        "Unknown calendar type encountered. Please report this issue: "
        "https://github.com/VinzenzKlass/anaplan-sdk/issues/new"
    )


def parse_insertion_response(data: list[dict]) -> InsertionResult:
    failures, added, ignored, total = [], 0, 0, 0
    for res in data:
        failures.append(res.get("failures", []))
        added += res.get("added", 0)
        total += res.get("total", 0)
        ignored += res.get("ignored", 0)
    return InsertionResult(
        added=added, ignored=ignored, total=total, failures=list(chain.from_iterable(failures))
    )


def validate_dimension_id(dimension_id: int) -> int:
    if not (
        dimension_id == 101999999999
        or 101_000_000_000 <= dimension_id < 102_000_000_000
        or 109_000_000_000 <= dimension_id < 110_000_000_000
        or 114_000_000_000 <= dimension_id < 115_000_000_000
    ):
        raise InvalidIdentifierException(
            "Invalid dimension_id. Must be a List (101xxxxxxxxx), List Subset (109xxxxxxxxx), "
            "Line Item Subset (114xxxxxxxxx), or Users (101999999999)."
        )
    msg = (
        "Using `get_dimension_items` for {} is discouraged. "
        "Prefer `{}` for better performance and more details on the members."
    )
    if dimension_id == 101999999999:
        logger.warning(msg.format("Users", "get_users"))
    if 101000000000 <= dimension_id < 102000000000:
        logger.warning(msg.format("Lists", "get_list_items"))
    return dimension_id
```
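A few spot checks of the helpers above, assuming the private `anaplan_sdk._utils` module path from this diff (the workspace ID is a placeholder):

```python
from anaplan_sdk._utils import action_url, models_url, sort_params

# snake_case becomes camelCase; the +/- prefix encodes the sort direction.
assert sort_params("created_at", descending=True) == {"sort": "-createdAt"}
assert sort_params(None, descending=False) == {}

# Action types are inferred purely from the numeric ID ranges above.
assert action_url(116_000_000_123) == "exports"
assert action_url(118_000_000_001) == "processes"

# A string narrows the models listing to that workspace.
assert models_url("8a81b0995e8c6f2a", None) == (
    "https://api.anaplan.com/2/0/workspaces/8a81b0995e8c6f2a/models"
)
```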
anaplan_sdk/models/__init__.py
CHANGED
```diff
@@ -1,4 +1,12 @@
-from ._alm import
+from ._alm import (
+    ModelRevision,
+    ReportTask,
+    ReportTaskResult,
+    Revision,
+    SummaryReport,
+    SyncTask,
+    SyncTaskResult,
+)
 from ._base import AnaplanModel
 from ._bulk import (
     Action,
@@ -10,6 +18,7 @@ from ._bulk import (
     List,
     ListMetadata,
     Model,
+    ModelDeletionResult,
     Process,
     TaskResult,
     TaskResultDetail,
@@ -17,7 +26,27 @@ from ._bulk import (
     TaskSummary,
     Workspace,
 )
-from ._transactional import
+from ._transactional import (
+    CurrentPeriod,
+    Dimension,
+    DimensionWithCode,
+    Failure,
+    FiscalYear,
+    InsertionResult,
+    LineItem,
+    ListDeletionResult,
+    ListItem,
+    ModelCalendar,
+    ModelStatus,
+    Module,
+    MonthsQuartersYearsCalendar,
+    User,
+    View,
+    ViewInfo,
+    WeeksGeneralCalendar,
+    WeeksGroupingCalendar,
+    WeeksPeriodsCalendar,
+)
 
 __all__ = [
     "AnaplanModel",
@@ -41,9 +70,27 @@ __all__ = [
     "TaskResult",
     "TaskResultDetail",
     "TaskStatus",
+    "TaskSummary",
+    "SyncTaskResult",
+    "ReportTask",
+    "ReportTaskResult",
+    "SummaryReport",
     "SyncTask",
     "User",
     "Failure",
     "InsertionResult",
     "Revision",
+    "CurrentPeriod",
+    "FiscalYear",
+    "MonthsQuartersYearsCalendar",
+    "WeeksGeneralCalendar",
+    "WeeksGroupingCalendar",
+    "WeeksPeriodsCalendar",
+    "Dimension",
+    "View",
+    "ViewInfo",
+    "ModelCalendar",
+    "ModelDeletionResult",
+    "DimensionWithCode",
+    "ListDeletionResult",
 ]
```
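The net effect of this change is that the new ALM, calendar, and deletion-result models become part of the public surface and can be imported directly (names taken from the `__all__` additions above):

```python
from anaplan_sdk.models import (
    ModelCalendar,
    ModelDeletionResult,
    SummaryReport,
    SyncTask,
)
```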
anaplan_sdk/models/_alm.py
CHANGED
```diff
@@ -1,6 +1,7 @@
 from pydantic import Field
 
 from ._base import AnaplanModel
+from ._bulk import TaskSummary
 
 
 class Revision(AnaplanModel):
@@ -34,8 +35,8 @@ class ModelRevision(AnaplanModel):
             "workspace."
         ),
     )
-    workspace_id: str = Field(
-        description="The unique identifier of the workspace this revision belongs to."
+    workspace_id: str | None = Field(
+        None, description="The unique identifier of the workspace this revision belongs to."
     )
     applied_by: str = Field(
         description="The unique identifier of the user who applied this revision."
@@ -49,7 +50,64 @@ class ModelRevision(AnaplanModel):
     )
 
 
-class
-
-
-
+class SyncTaskResult(AnaplanModel):
+    source_revision_id: str = Field(description="The ID of the source revision.")
+    target_revision_id: str = Field(description="The ID of the target revision.")
+    successful: bool = Field(description="Whether the sync task was successful or not.")
+
+
+class SyncTask(TaskSummary):
+    current_step: str = Field(description="The current step of the sync task.")
+    result: SyncTaskResult | None = Field(None, description="The result of the sync task.")
+
+
+class ReportTaskResult(SyncTaskResult):
+    report_file_url: str = Field(
+        description="The URL of the report file generated by the sync task."
+    )
+
+
+class ReportTaskError(AnaplanModel):
+    title: str = Field(description="The title of the error.")
+    message: str = Field(validation_alias="messageText", description="The message of the error.")
+
+
+class ReportTaskFailureResult(AnaplanModel):
+    successful: bool = Field(description="Whether the sync task was successful or not.")
+    error: ReportTaskError = Field(description="The error that occurred during the sync task.")
+
+
+class ReportTask(SyncTask):
+    result: ReportTaskResult | ReportTaskFailureResult | None = Field(
+        None, description="The result of the comparison report task, including the report file URL."
+    )
+
+
+class SummaryTotals(AnaplanModel):
+    modified: int = Field(0, description="The number of modified items.")
+    deleted: int = Field(0, description="The number of deleted items.")
+    created: int = Field(0, description="The number of created items.")
+
+
+class SummaryDifferences(AnaplanModel):
+    line_items: SummaryTotals = Field(
+        SummaryTotals(modified=0, deleted=0, created=0), description="Changes in line items."
+    )
+    roles_contents: SummaryTotals = Field(
+        SummaryTotals(modified=0, deleted=0, created=0), description="Changes in roles contents."
+    )
+    lists: SummaryTotals = Field(
+        SummaryTotals(modified=0, deleted=0, created=0), description="Changes in lists."
+    )
+    modules: SummaryTotals = Field(
+        SummaryTotals(modified=0, deleted=0, created=0), description="Changes in modules."
+    )
+
+
+class SummaryReport(AnaplanModel):
+    target_revision_id: str = Field(description="The ID of the target revision.")
+    source_revision_id: str = Field(description="The ID of the source revision.")
+    totals: SummaryTotals = Field(description="The total counts of changes.")
+    differences: SummaryDifferences = Field(
+        description="The detailed breakdown of changes by category."
+    )
```
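Since `SyncTask` now inherits from `TaskSummary`, the ALM sync endpoints can reuse the shared polling logic in `_services.py`, which loops until `task_state == "COMPLETE"`. A hypothetical payload parse, assuming `TaskSummary` carries the `taskId`/`taskState`/`creationTime` fields that the removed `TaskStatus` body suggests, plus the usual camelCase aliasing on `AnaplanModel`:

```python
from anaplan_sdk.models import SyncTask

task = SyncTask.model_validate(
    {
        # Base TaskSummary fields (assumed, not shown in this diff).
        "taskId": "0A2B4C6D8E",
        "taskState": "COMPLETE",
        "creationTime": 1717171717,
        # Fields added by SyncTask in this diff.
        "currentStep": "Complete.",
        "result": {
            "sourceRevisionId": "rev-source",
            "targetRevisionId": "rev-target",
            "successful": True,
        },
    }
)
assert task.result is not None and task.result.successful
```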
anaplan_sdk/models/_bulk.py
CHANGED
```diff
@@ -1,7 +1,6 @@
 from typing import Literal, TypeAlias
 
-from pydantic import
-from pydantic.alias_generators import to_camel
+from pydantic import Field, field_validator
 
 from ._base import AnaplanModel
 
@@ -40,11 +39,13 @@ class Model(AnaplanModel):
         description="The unique identifier of the user who last modified this model."
     )
     memory_usage: int = Field(0, description="The memory usage of this model in bytes.")
-
-
+    workspace_id: str = Field(
+        validation_alias="currentWorkspaceId",
+        description="The unique identifier of the workspace that this model is currently in.",
     )
-
-
+    workspace_name: str = Field(
+        validation_alias="currentWorkspaceName",
+        description="The name of the workspace that this model is currently in.",
     )
     url: str = Field(validation_alias="modelUrl", description="The current URL of this model.")
     category_values: list = Field(description="The category values of this model.")
@@ -164,13 +165,21 @@ class TaskResult(AnaplanModel):
     )
 
 
-class TaskStatus(
-    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)
-    id: str = Field(validation_alias="taskId", description="The unique identifier of this task.")
-    task_state: Literal["NOT_STARTED", "IN_PROGRESS", "COMPLETE"] = Field(
-        description="The state of this task."
-    )
-    creation_time: int = Field(description="Unix timestamp of when this task was created.")
+class TaskStatus(TaskSummary):
     progress: float = Field(description="The progress of this task as a float between 0 and 1.")
     current_step: str | None = Field(None, description="The current step of this task.")
     result: TaskResult | None = Field(None)
+
+
+class DeletionFailure(AnaplanModel):
+    model_id: str = Field(description="The unique identifier of the model that failed to delete.")
+    message: str = Field(description="The error message explaining why the deletion failed.")
+
+
+class ModelDeletionResult(AnaplanModel):
+    models_deleted: int = Field(description="The number of models that were successfully deleted.")
+    failures: list[DeletionFailure] = Field(
+        [],
+        validation_alias="bulkDeleteModelsFailures",
+        description="List of models that failed to delete with their error messages.",
+    )
```
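`ModelDeletionResult` appears to back a new bulk model deletion capability. A hypothetical response parse (the `bulkDeleteModelsFailures` alias is taken straight from the diff; the camelCase `modelsDeleted`/`modelId` keys assume `AnaplanModel`'s usual alias generator):

```python
from anaplan_sdk.models import ModelDeletionResult

result = ModelDeletionResult.model_validate(
    {
        "modelsDeleted": 2,
        "bulkDeleteModelsFailures": [
            {"modelId": "C1E2G3", "message": "Model is currently open."}
        ],
    }
)
assert result.models_deleted == 2
assert result.failures[0].model_id == "C1E2G3"
```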