anaplan-sdk 0.4.4a4__py3-none-any.whl → 0.5.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -15,6 +15,7 @@ from anaplan_sdk.models import (
      File,
      Import,
      Model,
+     ModelDeletionResult,
      Process,
      TaskStatus,
      TaskSummary,
@@ -26,7 +27,6 @@ from ._audit import _AsyncAuditClient
  from ._cloud_works import _AsyncCloudWorksClient
  from ._transactional import _AsyncTransactionalClient

- logging.getLogger("httpx").setLevel(logging.CRITICAL)
  logger = logging.getLogger("anaplan_sdk")

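Note that the SDK no longer silences httpx logging for the whole process. Applications that relied on that side effect can restore it themselves; a minimal sketch using only the standard library:

    import logging

    logging.getLogger("httpx").setLevel(logging.CRITICAL)  # opt back in to the old behavior
    logging.getLogger("anaplan_sdk").setLevel(logging.INFO)  # the SDK's own logs flow here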
 
@@ -107,13 +107,17 @@ class AsyncClient(_AsyncBaseClient):
              ),
              timeout=timeout,
          )
+         self._workspace_id = workspace_id
+         self._model_id = model_id
          self._retry_count = retry_count
          self._url = f"https://api.anaplan.com/2/0/workspaces/{workspace_id}/models/{model_id}"
          self._transactional_client = (
              _AsyncTransactionalClient(_client, model_id, retry_count) if model_id else None
          )
          self._alm_client = (
-             _AsyncAlmClient(_client, model_id, self._retry_count) if model_id else None
+             _AsyncAlmClient(_client, model_id, self._retry_count, status_poll_delay)
+             if model_id
+             else None
          )
          self._audit = _AsyncAuditClient(_client, self._retry_count)
          self._cloud_works = _AsyncCloudWorksClient(_client, self._retry_count)
@@ -123,23 +127,37 @@ class AsyncClient(_AsyncBaseClient):
          super().__init__(retry_count, _client)

      @classmethod
-     def from_existing(cls, existing: Self, workspace_id: str, model_id: str) -> Self:
+     def from_existing(
+         cls, existing: Self, *, workspace_id: str | None = None, model_id: str | None = None
+     ) -> Self:
          """
          Create a new instance of the Client from an existing instance. This is useful if you want
          to interact with multiple models or workspaces in the same script but share the same
          authentication and configuration. This creates a shallow copy of the existing client and
-         update the relevant attributes to the new workspace and model.
+         optionally updates the relevant attributes to the new workspace and model. You can provide
+         either a new workspace Id or a new model Id, or both. If you do not provide one of them,
+         the existing value will be used. If you omit both, the new instance will be an identical
+         copy of the existing instance.
+
          :param existing: The existing instance to copy.
-         :param workspace_id: The workspace Id to use.
-         :param model_id: The model Id to use.
+         :param workspace_id: The workspace Id to use or None to use the existing workspace Id.
+         :param model_id: The model Id to use or None to use the existing model Id.
          :return: A new instance of the Client.
          """
          client = copy(existing)
-         client._url = f"https://api.anaplan.com/2/0/workspaces/{workspace_id}/models/{model_id}"
+         new_ws_id = workspace_id or existing._workspace_id
+         new_model_id = model_id or existing._model_id
+         logger.debug(
+             f"Creating a new AsyncClient from existing instance "
+             f"with workspace_id={new_ws_id}, model_id={new_model_id}."
+         )
+         client._url = f"https://api.anaplan.com/2/0/workspaces/{new_ws_id}/models/{new_model_id}"
          client._transactional_client = _AsyncTransactionalClient(
-             existing._client, model_id, existing._retry_count
+             existing._client, new_model_id, existing._retry_count
+         )
+         client._alm_client = _AsyncAlmClient(
+             existing._client, new_model_id, existing._retry_count, existing.status_poll_delay
          )
-         client._alm_client = _AsyncAlmClient(existing._client, model_id, existing._retry_count)
          return client
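Since both Ids are now keyword-only and optional, a minimal usage sketch (the Ids and the auth keyword arguments are placeholders, not taken from this diff):

    from anaplan_sdk import AsyncClient

    base = AsyncClient(
        workspace_id="8a81b09e5e8c6f2a",  # placeholder
        model_id="96339A3A48394AA3A4BD1FAC92E3F136",  # placeholder
        user_email="user@example.com",  # assumed auth kwargs; adapt to your setup
        password="...",
    )
    # Shares auth and config; workspace_id falls back to the existing value.
    other = AsyncClient.from_existing(base, model_id="0A3F2B6C41D05E7B")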

      @property
@@ -197,7 +215,7 @@ class AsyncClient(_AsyncBaseClient):
          )
          return self._alm_client

-     async def list_workspaces(self, search_pattern: str | None = None) -> list[Workspace]:
+     async def get_workspaces(self, search_pattern: str | None = None) -> list[Workspace]:
          """
          Lists all the Workspaces the authenticated user has access to.
          :param search_pattern: Optionally filter for specific workspaces. When provided,
@@ -216,13 +234,13 @@ class AsyncClient(_AsyncBaseClient):
              )
          ]
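This release renames the list_* methods to get_* throughout (get_workspaces, get_models, get_files, get_actions, get_processes, get_imports, get_exports, plus the CloudWorks and flow equivalents further down), so upgrading from 0.4.x is a mechanical rename. A short sketch of the search_pattern wildcards described in these docstrings:

    # 0.4.x: await client.list_models()
    models = await client.get_models(search_pattern="budget")  # case-insensitive contains
    workspaces = await client.get_workspaces(search_pattern="FP_A%")  # _ = one char, % = 0-n chars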

-     async def list_models(self, search_pattern: str | None = None) -> list[Model]:
+     async def get_models(self, search_pattern: str | None = None) -> list[Model]:
          """
          Lists all the Models the authenticated user has access to.
          :param search_pattern: Optionally filter for specific models. When provided,
-             case-insensitive matches models names containing this string.
+             case-insensitive matches model names containing this string.
              You can use the wildcards `%` for 0-n characters, and `_` for exactly 1 character.
-             When None (default), returns all users.
+             When None (default), returns all models.
          :return: The List of Models.
          """
          params = {"modelDetails": "true"}
@@ -235,7 +253,21 @@ class AsyncClient(_AsyncBaseClient):
              )
          ]

-     async def list_files(self) -> list[File]:
+     async def delete_models(self, model_ids: list[str]) -> ModelDeletionResult:
+         """
+         Delete the given Models. Models need to be closed before they can be deleted. If one of the
+         deletions fails, the other deletions will still be attempted and may complete.
+         :param model_ids: The list of Model identifiers to delete.
+         :return:
+         """
+         logger.info(f"Deleting Models: {', '.join(model_ids)}.")
+         res = await self._post(
+             f"https://api.anaplan.com/2/0/workspaces/{self._workspace_id}/bulkDeleteModels",
+             json={"modelIdsToDelete": model_ids},
+         )
+         return ModelDeletionResult.model_validate(res)
+
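A hedged sketch of the new bulk deletion; the client must have been constructed with the owning workspace_id, the models must be closed, and the Ids are placeholders:

    result = await client.delete_models(
        ["96339A3A48394AA3A4BD1FAC92E3F136", "1B44C2D3E4F5A6B7C8D9E0F1A2B3C4D5"]
    )
    # result is a ModelDeletionResult (a Pydantic model) describing the outcome of
    # each attempted deletion; one failure does not abort the remaining deletions.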
+     async def get_files(self) -> list[File]:
          """
          Lists all the Files in the Model.
          :return: The List of Files.
@@ -244,7 +276,7 @@ class AsyncClient(_AsyncBaseClient):
              File.model_validate(e) for e in await self._get_paginated(f"{self._url}/files", "files")
          ]

-     async def list_actions(self) -> list[Action]:
+     async def get_actions(self) -> list[Action]:
          """
          Lists all the Actions in the Model. This will only return the Actions listed under
          `Other Actions` in Anaplan. For Imports, exports, and processes, see their respective
@@ -256,7 +288,7 @@ class AsyncClient(_AsyncBaseClient):
              for e in await self._get_paginated(f"{self._url}/actions", "actions")
          ]

-     async def list_processes(self) -> list[Process]:
+     async def get_processes(self) -> list[Process]:
          """
          Lists all the Processes in the Model.
          :return: The List of Processes.
@@ -266,7 +298,7 @@ class AsyncClient(_AsyncBaseClient):
              for e in await self._get_paginated(f"{self._url}/processes", "processes")
          ]

-     async def list_imports(self) -> list[Import]:
+     async def get_imports(self) -> list[Import]:
          """
          Lists all the Imports in the Model.
          :return: The List of Imports.
@@ -276,7 +308,7 @@ class AsyncClient(_AsyncBaseClient):
              for e in await self._get_paginated(f"{self._url}/imports", "imports")
          ]

-     async def list_exports(self) -> list[Export]:
+     async def get_exports(self) -> list[Export]:
          """
          Lists all the Exports in the Model.
          :return: The List of Exports.
@@ -286,33 +318,34 @@ class AsyncClient(_AsyncBaseClient):
              for e in await self._get_paginated(f"{self._url}/exports", "exports")
          ]

-     async def run_action(self, action_id: int) -> TaskStatus:
+     async def run_action(self, action_id: int, wait_for_completion: bool = True) -> TaskStatus:
          """
-         Runs the specified Anaplan Action and validates the spawned task. If the Action fails or
-         completes with errors, will raise an :py:class:`AnaplanActionError`. Failed Tasks are
-         usually not something you can recover from at runtime and often require manual changes in
-         Anaplan, i.e. updating the mapping of an Import or similar. So, for convenience, this will
-         raise an Exception to handle - if you for e.g. think that one of the uploaded chunks may
-         have been dropped and simply retrying with new data may help - and not return the task
-         status information that needs to be handled by the caller.
-
-         If you need more information or control, you can use `invoke_action()` and
-         `get_task_status()`.
+         Runs the Action and validates the spawned task. If the Action fails or completes with
+         errors, this will raise an AnaplanActionError. Failed Tasks are often not something you
+         can recover from at runtime and often require manual changes in Anaplan, i.e. updating the
+         mapping of an Import or similar.
          :param action_id: The identifier of the Action to run. Can be any Anaplan Invokable;
-         Processes, Imports, Exports, Other Actions.
+             Processes, Imports, Exports, Other Actions.
+         :param wait_for_completion: If True, the method will poll the task status and not return
+             until the task is complete. If False, it will spawn the task and return immediately.
          """
-         task_id = await self.invoke_action(action_id)
-         task_status = await self.get_task_status(action_id, task_id)
+         body = {"localeName": "en_US"}
+         res = await self._post(f"{self._url}/{action_url(action_id)}/{action_id}/tasks", json=body)
+         task_id = res["task"]["taskId"]
+         logger.info(f"Invoked Action '{action_id}', spawned Task: '{task_id}'.")

-         while task_status.task_state != "COMPLETE":
+         if not wait_for_completion:
+             return TaskStatus.model_validate(await self.get_task_status(action_id, task_id))
+
+         while (status := await self.get_task_status(action_id, task_id)).task_state != "COMPLETE":
              await sleep(self.status_poll_delay)
-             task_status = await self.get_task_status(action_id, task_id)

-         if task_status.task_state == "COMPLETE" and not task_status.result.successful:
+         if status.task_state == "COMPLETE" and not status.result.successful:
+             logger.error(f"Task '{task_id}' completed with errors: {status.result.error_message}")
              raise AnaplanActionError(f"Task '{task_id}' completed with errors.")

-         logger.info(f"Task '{task_id}' completed successfully.")
-         return task_status
+         logger.info(f"Task '{task_id}' of '{action_id}' completed successfully.")
+         return status
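invoke_action() and the manual polling loop are gone; the new wait_for_completion flag covers the fire-and-forget case. A sketch with a placeholder action Id:

    # Default: polls every status_poll_delay seconds and raises AnaplanActionError
    # if the task completes with errors.
    status = await client.run_action(118000000042)

    # Fire-and-forget: spawns the task and returns the first TaskStatus snapshot.
    snapshot = await client.run_action(118000000042, wait_for_completion=False)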

      async def get_file(self, file_id: int) -> bytes:
          """
@@ -321,58 +354,71 @@ class AsyncClient(_AsyncBaseClient):
          :return: The content of the file.
          """
          chunk_count = await self._file_pre_check(file_id)
+         logger.info(f"File {file_id} has {chunk_count} chunks.")
          if chunk_count <= 1:
              return await self._get_binary(f"{self._url}/files/{file_id}")
-         logger.info(f"File {file_id} has {chunk_count} chunks.")
          return b"".join(
              await gather(
-                 *[
+                 *(
                      self._get_binary(f"{self._url}/files/{file_id}/chunks/{i}")
                      for i in range(chunk_count)
-                 ]
+                 )
              )
          )

-     async def get_file_stream(self, file_id: int) -> AsyncIterator[bytes]:
+     async def get_file_stream(self, file_id: int, batch_size: int = 1) -> AsyncIterator[bytes]:
          """
          Retrieves the content of the specified file as a stream of chunks. The chunks are yielded
          one by one, so you can process them as they arrive. This is useful for large files where
          you don't want to or cannot load the entire file into memory at once.
          :param file_id: The identifier of the file to retrieve.
+         :param batch_size: Number of chunks to fetch concurrently. If > 1, n chunks will be fetched
+             concurrently. This still yields each chunk individually, only the requests are
+             batched. If 1 (default), each chunk is fetched sequentially.
          :return: A generator yielding the chunks of the file.
          """
          chunk_count = await self._file_pre_check(file_id)
+         logger.info(f"File {file_id} has {chunk_count} chunks.")
          if chunk_count <= 1:
              yield await self._get_binary(f"{self._url}/files/{file_id}")
              return
-         logger.info(f"File {file_id} has {chunk_count} chunks.")
-         for i in range(chunk_count):
-             yield await self._get_binary(f"{self._url}/files/{file_id}/chunks/{i}")
+
+         for batch_start in range(0, chunk_count, batch_size):
+             batch_chunks = await gather(
+                 *(
+                     self._get_binary(f"{self._url}/files/{file_id}/chunks/{i}")
+                     for i in range(batch_start, min(batch_start + batch_size, chunk_count))
+                 )
+             )
+             for chunk in batch_chunks:
+                 yield chunk
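With batch_size > 1 the chunk requests are issued concurrently per batch while chunks are still yielded in order. A sketch, given an AsyncClient as client and a placeholder file Id in the 113... file range:

    async def download_export() -> None:
        # Four chunk requests in flight per batch; the order of yields is preserved.
        with open("export.csv", "wb") as f:
            async for chunk in client.get_file_stream(113000000001, batch_size=4):
                f.write(chunk)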

      async def upload_file(self, file_id: int, content: str | bytes) -> None:
          """
          Uploads the content to the specified file. If there are several chunks, upload of
-         individual chunks are concurrent.
+         individual chunks are uploaded concurrently.

          :param file_id: The identifier of the file to upload to.
          :param content: The content to upload. **This Content will be compressed before uploading.
-             If you are passing the Input as bytes, pass it uncompressed to avoid
-             redundant work.**
+             If you are passing the Input as bytes, pass it uncompressed.**
          """
-         if isinstance(content, str):
-             content = content.encode()
          chunks = [
              content[i : i + self.upload_chunk_size]
              for i in range(0, len(content), self.upload_chunk_size)
          ]
-         logger.info(f"Content will be uploaded in {len(chunks)} chunks.")
+         logger.info(f"Content for file '{file_id}' will be uploaded in {len(chunks)} chunks.")
          await self._set_chunk_count(file_id, len(chunks))
          await gather(
-             *[self._upload_chunk(file_id, index, chunk) for index, chunk in enumerate(chunks)]
+             *(self._upload_chunk(file_id, index, chunk) for index, chunk in enumerate(chunks))
          )

+         logger.info(f"Completed upload for file '{file_id}'.")
+
      async def upload_file_stream(
-         self, file_id: int, content: AsyncIterator[bytes | str] | Iterator[str | bytes]
+         self,
+         file_id: int,
+         content: AsyncIterator[bytes | str] | Iterator[str | bytes],
+         batch_size: int = 1,
      ) -> None:
          """
          Uploads the content to the specified file as a stream of chunks. This is useful either for
@@ -382,25 +428,42 @@ class AsyncClient(_AsyncBaseClient):
          generator that yields the chunks of the file one by one to this method.

          :param file_id: The identifier of the file to upload to.
-         :param content: An Iterator or AsyncIterator yielding the chunks of the file.
-             (Most likely a generator).
+         :param content: An Iterator or AsyncIterator yielding the chunks of the file. You can pass
+             any Iterator, but you will most likely want to pass a Generator.
+         :param batch_size: Number of chunks to upload concurrently. If > 1, n chunks will be
+             uploaded concurrently. This can be useful if you either do not control the chunk
+             size, or if you want to keep the chunk size small but still want some concurrency.
          """
+         logger.info(f"Starting upload stream for file '{file_id}' with batch size {batch_size}.")
          await self._set_chunk_count(file_id, -1)
+         tasks = []
          if isinstance(content, Iterator):
              for index, chunk in enumerate(content):
-                 await self._upload_chunk(
-                     file_id, index, chunk.encode() if isinstance(chunk, str) else chunk
-                 )
+                 tasks.append(self._upload_chunk(file_id, index, chunk))
+                 if len(tasks) == max(batch_size, 1):
+                     await gather(*tasks)
+                     logger.info(
+                         f"Completed upload stream batch of size {batch_size} for file {file_id}."
+                     )
+                     tasks = []
          else:
              index = 0
              async for chunk in content:
-                 await self._upload_chunk(
-                     file_id, index, chunk.encode() if isinstance(chunk, str) else chunk
-                 )
+                 tasks.append(self._upload_chunk(file_id, index, chunk))
                  index += 1
-
+                 if len(tasks) == max(batch_size, 1):
+                     await gather(*tasks)
+                     logger.info(
+                         f"Completed upload stream batch of size {batch_size} for file {file_id}."
+                     )
+                     tasks = []
+         if tasks:
+             await gather(*tasks)
+             logger.info(
+                 f"Completed final upload stream batch of size {len(tasks)} for file {file_id}."
+             )
          await self._post(f"{self._url}/files/{file_id}/complete", json={"id": file_id})
-         logger.info(f"Marked all chunks as complete for file '{file_id}'.")
+         logger.info(f"Completed upload stream for '{file_id}'.")

      async def upload_and_import(self, file_id: int, content: str | bytes, action_id: int) -> None:
          """
@@ -425,7 +488,7 @@ class AsyncClient(_AsyncBaseClient):
          await self.run_action(action_id)
          return await self.get_file(action_id)

-     async def list_task_status(self, action_id: int) -> list[TaskSummary]:
+     async def get_task_summaries(self, action_id: int) -> list[TaskSummary]:
          """
          Retrieves the status of all tasks spawned by the specified action.
          :param action_id: The identifier of the action that was invoked.
@@ -451,34 +514,26 @@ class AsyncClient(_AsyncBaseClient):
              ).get("task")
          )

-     async def invoke_action(self, action_id: int) -> str:
+     async def get_optimizer_log(self, action_id: int, task_id: str) -> bytes:
          """
-         You may want to consider using `run_action()` instead.
-
-         Invokes the specified Anaplan Action and returns the spawned Task identifier. This is
-         useful if you want to handle the Task status yourself or if you want to run multiple
-         Actions in parallel.
-         :param action_id: The identifier of the Action to run. Can be any Anaplan Invokable.
-         :return: The identifier of the spawned Task.
+         Retrieves the solution logs of the specified optimization action task.
+         :param action_id: The identifier of the optimization action that was invoked.
+         :param task_id: The Task identifier, sometimes also referred to as the Correlation Id.
+         :return: The content of the solution logs.
          """
-         response = await self._post(
-             f"{self._url}/{action_url(action_id)}/{action_id}/tasks", json={"localeName": "en_US"}
+         return await self._get_binary(
+             f"{self._url}/optimizeActions/{action_id}/tasks/{task_id}/solutionLogs"
          )
-         task_id = response.get("task").get("taskId")
-         logger.info(f"Invoked Action '{action_id}', spawned Task: '{task_id}'.")
-         return task_id
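A sketch of fetching solver logs for an optimization run, with placeholder Ids; the task Id is the one reported for the spawned task (also called the Correlation Id):

    log = await client.get_optimizer_log(117000000007, "0F2D5C7A1B3E")
    print(log.decode("utf-8"))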

      async def _file_pre_check(self, file_id: int) -> int:
-         file = next(filter(lambda f: f.id == file_id, await self.list_files()), None)
+         file = next((f for f in await self.get_files() if f.id == file_id), None)
          if not file:
              raise InvalidIdentifierException(f"File {file_id} not found.")
          return file.chunk_count

-     async def _upload_chunk(self, file_id: int, index: int, chunk: bytes) -> None:
-         await self._run_with_retry(
-             self._put_binary_gzip, f"{self._url}/files/{file_id}/chunks/{index}", content=chunk
-         )
-         logger.info(f"Chunk {index} loaded to file '{file_id}'.")
+     async def _upload_chunk(self, file_id: int, index: int, chunk: str | bytes) -> None:
+         await self._put_binary_gzip(f"{self._url}/files/{file_id}/chunks/{index}", chunk)
+         logger.debug(f"Chunk {index} loaded to file '{file_id}'.")

      async def _set_chunk_count(self, file_id: int, num_chunks: int) -> None:
          if not self.allow_file_creation and not (113000000000 <= file_id <= 113999999999):
@@ -1,3 +1,4 @@
+ import logging
  from typing import Any, Literal

  import httpx
@@ -27,6 +28,8 @@ from anaplan_sdk.models.cloud_works import (

  from ._cw_flow import _AsyncFlowClient

+ logger = logging.getLogger("anaplan_sdk")
+

  class _AsyncCloudWorksClient(_AsyncBaseClient):
      def __init__(self, client: httpx.AsyncClient, retry_count: int) -> None:
@@ -41,7 +44,7 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          """
          return self._flow

-     async def list_connections(self) -> list[Connection]:
+     async def get_connections(self) -> list[Connection]:
          """
          List all Connections available in CloudWorks.
          :return: A list of connections.
@@ -62,7 +65,9 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          res = await self._post(
              f"{self._url}/connections", json=construct_payload(ConnectionInput, con_info)
          )
-         return res["connections"]["connectionId"]
+         connection_id = res["connections"]["connectionId"]
+         logger.info(f"Created connection '{connection_id}'.")
+         return connection_id

      async def update_connection(
          self, con_id: str, con_info: ConnectionBody | dict[str, Any]
@@ -91,8 +96,9 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          :param con_id: The ID of the connection to delete.
          """
          await self._delete(f"{self._url}/connections/{con_id}")
+         logger.info(f"Deleted connection '{con_id}'.")

-     async def list_integrations(
+     async def get_integrations(
          self, sort_by_name: Literal["ascending", "descending"] = "ascending"
      ) -> list[Integration]:
          """
@@ -143,7 +149,10 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          :return: The ID of the new integration.
          """
          json = integration_payload(body)
-         return (await self._post(f"{self._url}", json=json))["integration"]["integrationId"]
+         res = await self._post(f"{self._url}", json=json)
+         integration_id = res["integration"]["integrationId"]
+         logger.info(f"Created integration '{integration_id}'.")
+         return integration_id

      async def update_integration(
          self, integration_id: str, body: IntegrationInput | IntegrationProcessInput | dict[str, Any]
@@ -164,7 +173,9 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          :param integration_id: The ID of the integration to run.
          :return: The ID of the run instance.
          """
-         return (await self._post_empty(f"{self._url}/{integration_id}/run"))["run"]["id"]
+         run_id = (await self._post_empty(f"{self._url}/{integration_id}/run"))["run"]["id"]
+         logger.info(f"Started integration run '{run_id}' for integration '{integration_id}'.")
+         return run_id
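Starting a run now logs the spawned run Id at INFO level. A sketch, assuming the CloudWorks client is exposed on the root client (shown here as client.cloud_works; verify the property name for your version) and a placeholder integration Id:

    run_id = await client.cloud_works.run_integration("e3f1a2b4c5d6")
    history = await client.cloud_works.get_run_history("e3f1a2b4c5d6")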

      async def delete_integration(self, integration_id: str) -> None:
          """
@@ -172,6 +183,7 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          :param integration_id: The ID of the integration to delete.
          """
          await self._delete(f"{self._url}/{integration_id}")
+         logger.info(f"Deleted integration '{integration_id}'.")

      async def get_run_history(self, integration_id: str) -> list[RunSummary]:
          """
@@ -218,6 +230,7 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
              f"{self._url}/{integration_id}/schedule",
              json=schedule_payload(integration_id, schedule),
          )
+         logger.info(f"Created schedule for integration '{integration_id}'.")

      async def update_schedule(
          self, integration_id: str, schedule: ScheduleInput | dict[str, Any]
@@ -250,6 +263,7 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          :param integration_id: The ID of the integration to schedule.
          """
          await self._delete(f"{self._url}/{integration_id}/schedule")
+         logger.info(f"Deleted schedule for integration '{integration_id}'.")

      async def get_notification_config(
          self, notification_id: str | None = None, integration_id: str | None = None
@@ -285,7 +299,9 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          res = await self._post(
              f"{self._url}/notification", json=construct_payload(NotificationInput, config)
          )
-         return res["notification"]["notificationId"]
+         notification_id = res["notification"]["notificationId"]
+         logger.info(f"Created notification configuration '{notification_id}'.")
+         return notification_id

      async def update_notification_config(
          self, notification_id: str, config: NotificationInput | dict[str, Any]
@@ -320,6 +336,7 @@ class _AsyncCloudWorksClient(_AsyncBaseClient):
          if integration_id:
              notification_id = (await self.get_integration(integration_id)).notification_id
          await self._delete(f"{self._url}/notification/{notification_id}")
+         logger.info(f"Deleted notification configuration '{notification_id}'.")

      async def get_import_error_dump(self, run_id: str) -> bytes:
          """
@@ -1,3 +1,4 @@
+ import logging
  from typing import Any

  import httpx
@@ -5,13 +6,15 @@ import httpx
  from anaplan_sdk._base import _AsyncBaseClient, construct_payload
  from anaplan_sdk.models.flows import Flow, FlowInput, FlowSummary

+ logger = logging.getLogger("anaplan_sdk")
+

  class _AsyncFlowClient(_AsyncBaseClient):
      def __init__(self, client: httpx.AsyncClient, retry_count: int) -> None:
          self._url = "https://api.cloudworks.anaplan.com/2/0/integrationflows"
          super().__init__(retry_count, client)

-     async def list_flows(self, current_user_only: bool = False) -> list[FlowSummary]:
+     async def get_flows(self, current_user_only: bool = False) -> list[FlowSummary]:
          """
          List all flows in CloudWorks.
          :param current_user_only: Filters the flows to only those created by the current user.
@@ -49,7 +52,9 @@ class _AsyncFlowClient(_AsyncBaseClient):
              if only_steps
              else self._post_empty(url)
          )
-         return res["run"]["id"]
+         run_id = res["run"]["id"]
+         logger.info(f"Started flow run '{run_id}' for flow '{flow_id}'.")
+         return run_id

      async def create_flow(self, flow: FlowInput | dict[str, Any]) -> str:
          """
@@ -60,7 +65,9 @@ class _AsyncFlowClient(_AsyncBaseClient):
          :return: The ID of the created flow.
          """
          res = await self._post(self._url, json=construct_payload(FlowInput, flow))
-         return res["integrationFlow"]["integrationFlowId"]
+         flow_id = res["integrationFlow"]["integrationFlowId"]
+         logger.info(f"Created flow '{flow_id}'.")
+         return flow_id

      async def update_flow(self, flow_id: str, flow: FlowInput | dict[str, Any]) -> None:
          """
@@ -70,6 +77,7 @@ class _AsyncFlowClient(_AsyncBaseClient):
          :param flow: The flow to update. This can be a FlowInput object or a dictionary.
          """
          await self._put(f"{self._url}/{flow_id}", json=construct_payload(FlowInput, flow))
+         logger.info(f"Updated flow '{flow_id}'.")

      async def delete_flow(self, flow_id: str) -> None:
          """
@@ -78,3 +86,4 @@ class _AsyncFlowClient(_AsyncBaseClient):
          :param flow_id: The ID of the flow to delete.
          """
          await self._delete(f"{self._url}/{flow_id}")
+         logger.info(f"Deleted flow '{flow_id}'.")