sprocket-systems.coda.sdk 2.0.10__tar.gz → 2.0.12__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/PKG-INFO +1 -1
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/pyproject.toml +1 -1
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/__init__.py +2 -2
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/__init__.py +16 -0
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/essence.py +110 -13
- sprocket_systems_coda_sdk-2.0.12/src/coda/sdk/exceptions.py +49 -0
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/job.py +168 -56
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/preset.py +1 -4
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/utils.py +157 -2
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/workflow.py +2 -2
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/LICENSE +0 -0
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/PYPI_README.md +0 -0
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/constants.py +0 -0
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/enums.py +0 -0
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk.py +0 -0
- {sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/tc_tools.py +0 -0
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sprocket-systems.coda.sdk
-Version: 2.0.10
+Version: 2.0.12
 Summary: The Coda SDK provides a Python interface to define Coda workflows, create jobs and run them.
 Keywords: python,coda,sdk
 Author-Email: Sprocket Systems <support@sprocket.systems>
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/__init__.py
RENAMED
@@ -1,5 +1,5 @@
 # The versions below will be replaced automatically in CI.
 # You do not need to modify any of the versions below.
-__version__ = "2.0.10"
-CODA_APP_SUITE_VERSION = "+coda-2.0.
+__version__ = "2.0.12"
+CODA_APP_SUITE_VERSION = "+coda-2.0.17"
 FINAL_VERSION = __version__ + CODA_APP_SUITE_VERSION
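For reference, FINAL_VERSION is plain string concatenation, so for the 2.0.12 release shown here it evaluates as follows (a minimal sketch):

__version__ = "2.0.12"
CODA_APP_SUITE_VERSION = "+coda-2.0.17"
FINAL_VERSION = __version__ + CODA_APP_SUITE_VERSION
print(FINAL_VERSION)  # prints: 2.0.12+coda-2.0.17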
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/__init__.py
RENAMED
@@ -4,6 +4,15 @@ from .workflow import WorkflowDefinition, WorkflowDefinitionBuilder
 from .preset import Preset
 from .enums import PresetType, SourceType, VenueType, InputFilter, Language, Format, StemType, FrameRate, InputStemType
 from .utils import user_info, timing_info, get_channels
+from .exceptions import (
+    CodaAPIError,
+    CodaAuthenticationError,
+    CodaForbiddenError,
+    CodaBadRequestError,
+    CodaNotFoundError,
+    CodaClientError,
+    CodaServerError,
+)
 
 __all__ = [
     "Job",
@@ -24,4 +33,11 @@ __all__ = [
     "user_info",
     "get_channels",
     "timing_info",
+    "CodaAPIError",
+    "CodaAuthenticationError",
+    "CodaForbiddenError",
+    "CodaBadRequestError",
+    "CodaNotFoundError",
+    "CodaClientError",
+    "CodaServerError",
 ]
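With the re-exports above, the new exception types can be imported straight from the package root; a minimal sketch (the try body is a hypothetical placeholder for any SDK call that reaches the Coda API):

from coda.sdk import CodaAPIError, CodaNotFoundError

try:
    ...  # placeholder for any SDK call that reaches the Coda API
except CodaNotFoundError:
    ...  # handle a missing resource (404) specifically
except CodaAPIError:
    ...  # catch-all for the other Coda API error types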
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/essence.py
RENAMED
@@ -1,3 +1,4 @@
+from numbers import Number
 import os
 import sys
 import json
@@ -5,7 +6,7 @@ import shutil
 import subprocess
 
 from pathlib import Path
-from typing import List, Dict
+from typing import List, Dict, Any
 from .enums import Format, SourceType, InputStemType, FrameRate, Language
 from .constants import (
     ENV_CODA_CLI_EXE,
@@ -58,7 +59,7 @@ class Essence:
         if not format or not isinstance(format, str):
             raise ValueError("format must not be an empty string and must be a string type.")
 
-        self.payload = {
+        self.payload: Dict[str, Any] = {
             "type": "",
             "definition": {
                 "format": format,
@@ -75,6 +76,8 @@ class Essence:
             self.payload["definition"]["ffoa_timecode"] = timing_info.get("ffoa_timecode")
         if timing_info.get("lfoa_timecode"):
             self.payload["definition"]["lfoa_timecode"] = timing_info.get("lfoa_timecode")
+        if timing_info.get("file_start_timecode"):
+            self.payload["definition"]["file_start_timecode"] = timing_info.get("file_start_timecode")
 
     def add_interleaved_resource(
         self,
@@ -126,7 +129,7 @@ class Essence:
                 raise ValueError("IO Location ID is required for non-S3 file sources.")
             url = f"{URL_PREFIX_IO}{io_location_id}{url}"
 
-        resource_dict = {"url": url}
+        resource_dict: Dict[str, Any] = {"url": url}
         if auth is not None:
             resource_dict["auth"] = auth
         if opts is not None:
@@ -213,7 +216,7 @@ class Essence:
                 raise ValueError("IO Location ID is required for non-S3 file sources.")
             url = f"{URL_PREFIX_IO}{io_location_id}{url}"
 
-        res = {
+        res: Dict[str, Any] = {
            "resource": {"url": url},
            "bit_depth": bit_depth,
            "sample_rate": sample_rate,
@@ -270,25 +273,113 @@ class Essence:
         self,
         frame_rate: FrameRate | None = None,
         ffoa_timecode: str | None = None,
-        lfoa_timecode: str | None = None
+        lfoa_timecode: str | None = None,
+        file_start_timecode: str | None = None,
+        head_leader_length: int | None = None,
+        tail_leader_length: int | None = None
     ) -> None:
         """Override timing information for the essence.
-
+        Timing parameters come in two mutually exclusive groups. When using either group,
+        ALL parameters in that group must be provided:
+        - Timecode-based: frame_rate, ffoa_timecode, lfoa_timecode, file_start_timecode (all four required together)
+        - Offset-based: frame_rate, head_leader_length, tail_leader_length (all three required together)
 
         Args:
-            frame_rate: The source frame rate
+            frame_rate (FrameRate, optional): The source frame rate. Required with all timing groups.
             ffoa_timecode (str, optional): First frame of action timecode.
             lfoa_timecode (str, optional): Last frame of action timecode.
+            file_start_timecode (str, optional): File start timecode.
+            head_leader_length (int, optional): Head leader length in frames.
+            tail_leader_length (int, optional): Tail leader length in frames.
+
+        Raises:
+            ValueError: If both timecode-based and offset-based parameters are provided.
+            ValueError: If only some parameters from a group are provided.
 
         """
+        # Define parameter groups (excluding frame_rate from the "other params" check)
+        timecode_other_params = {
+            "ffoa_timecode": ffoa_timecode,
+            "lfoa_timecode": lfoa_timecode,
+            "file_start_timecode": file_start_timecode
+        }
+
+        offset_other_params = {
+            "head_leader_length": head_leader_length,
+            "tail_leader_length": tail_leader_length
+        }
+
+        timecode_other_set = [k for k, v in timecode_other_params.items() if v is not None]
+        offset_other_set = [k for k, v in offset_other_params.items() if v is not None]
+
+        # Check if parameters from both groups are provided
+        if timecode_other_set and offset_other_set:
+            raise ValueError(
+                "Timecode-based parameters (frame_rate, ffoa_timecode, lfoa_timecode, file_start_timecode) "
+                "and offset-based parameters (frame_rate, head_leader_length, tail_leader_length) are mutually exclusive. "
+                "Please provide only one type of timing parameter."
+            )
+
+        # Validate timecode group - if any timecode param is provided, all must be provided (including frame_rate)
+        if timecode_other_set:
+            full_timecode_params = {
+                "frame_rate": frame_rate,
+                "ffoa_timecode": ffoa_timecode,
+                "lfoa_timecode": lfoa_timecode,
+                "file_start_timecode": file_start_timecode
+            }
+            complete_timecode_set = [k for k, v in full_timecode_params.items() if v is not None]
+            if len(complete_timecode_set) != len(full_timecode_params):
+                missing = [k for k, v in full_timecode_params.items() if v is None]
+                raise ValueError(
+                    f"When using timecode-based parameters, all must be provided. "
+                    f"Missing: {', '.join(missing)}"
+                )
+
+        # Validate offset group - if any offset param is provided, all must be provided (including frame_rate)
+        if offset_other_set:
+            full_offset_params = {
+                "frame_rate": frame_rate,
+                "head_leader_length": head_leader_length,
+                "tail_leader_length": tail_leader_length
+            }
+            complete_offset_set = [k for k, v in full_offset_params.items() if v is not None]
+            if len(complete_offset_set) != len(full_offset_params):
+                missing = [k for k, v in full_offset_params.items() if v is None]
+                raise ValueError(
+                    f"When using offset-based parameters, all must be provided. "
+                    f"Missing: {', '.join(missing)}"
+                )
+
+        # Check if frame_rate is provided alone (without any other timing params)
+        if frame_rate is not None and not timecode_other_set and not offset_other_set:
+            raise ValueError(
+                "frame_rate cannot be used alone. It must be provided as part of either: "
+                "timecode-based parameters (frame_rate, ffoa_timecode, lfoa_timecode, file_start_timecode) or "
+                "offset-based parameters (frame_rate, head_leader_length, tail_leader_length)."
+            )
+
         if frame_rate is not None:
             fr_value = frame_rate.value if hasattr(frame_rate, 'value') else frame_rate
             self.payload["definition"]["frame_rate"] = fr_value
+
+        # For timecode based settings
         if ffoa_timecode is not None:
             self.payload["definition"]["ffoa_timecode"] = ffoa_timecode
-        if lfoa_timecode is not None:
             self.payload["definition"]["lfoa_timecode"] = lfoa_timecode
+            self.payload["definition"]["file_start_timecode"] = file_start_timecode
+
+        # For offset based settings
+        if head_leader_length is not None:
+            # Remove any set timecode settings just in case
+            self.payload["definition"].pop("ffoa_timecode", None)
+            self.payload["definition"].pop("lfoa_timecode", None)
+            self.payload["definition"].pop("file_start_timecode", None)
+
+            # Set the offest values
+            self.payload["definition"]["head_leader_length"] = head_leader_length
+            self.payload["definition"]["tail_leader_length"] = tail_leader_length
 
     def override_bext_time_reference(self, bext_time_reference: int) -> None:
         """Set BEXT time reference on all resources.
@@ -311,6 +402,8 @@ class Essence:
     ) -> list["Essence"]:
         """Create a list of CodaEssence objects from files.
 
+        Note: this method only supports multi-mono file sets.
+
        This method inspects local files using the 'coda inspect' command-line tool
        to automatically determine their properties. For S3 files or when the CLI
        is unavailable, it relies on the `file_info` dictionary for manual creation.
@@ -404,23 +497,27 @@ class Essence:
         )
 
         j = json.loads(ret.stdout)
-        print(json.dumps(j, indent=2))
         if not j.get("sources"):
             raise ValueError("`coda inspect` was unable to retrieve the sources information.")
 
         timing_info = {
             "frame_rate": j.get("source_frame_rate"),
             "ffoa_timecode": j.get("ffoa_timecode"),
-            "lfoa_timecode": j.get("lfoa_timecode")
+            "lfoa_timecode": j.get("lfoa_timecode"),
+            "file_start_timecode": j.get("file_start_timecode")
         }
 
         for source in j.get("sources", []):
             source_type = source.get("type")
-            if source_type in [SourceType.ADM, SourceType.IAB_MXF]:
-                format = Format.ATMOS
             source_def = source.get("definition")
+
+            # Determine the format with explicit type handling
+            format_value: str | Format = source_def.get("format", "")
+            if not format_value and source_type in [SourceType.ADM, SourceType.IAB_MXF]:
+                format_value = Format.ATMOS
+
             essence = Essence(
-                format=
+                format=format_value,
                 stem_type=source_def.get("type"),
                 program=source_def.get("program", program),
                 description=source_def.get("description"),
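The new override_timing contract above is easiest to see as two call styles; a minimal sketch with hypothetical values (it assumes an already-constructed Essence named essence, and FPS_24 is an assumed FrameRate member name):

from coda.sdk import FrameRate

# Timecode-based group: all four values must be given together.
essence.override_timing(
    frame_rate=FrameRate.FPS_24,          # assumed enum member name
    ffoa_timecode="01:00:08:00",
    lfoa_timecode="01:30:00:00",
    file_start_timecode="00:59:52:00",
)

# Offset-based group: all three values must be given together.
# Per the code above, this call also removes any previously set timecode keys.
essence.override_timing(
    frame_rate=FrameRate.FPS_24,
    head_leader_length=192,               # frames
    tail_leader_length=48,                # frames
)

# Mixing the two groups, or passing frame_rate alone, raises ValueError.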
sprocket_systems_coda_sdk-2.0.12/src/coda/sdk/exceptions.py
ADDED
@@ -0,0 +1,49 @@
+"""Exception classes for Coda API errors."""
+
+import requests
+
+
+class CodaAPIError(Exception):
+    """Base exception for Coda API errors.
+
+    Attributes:
+        status_code: HTTP status code
+        response: The full requests.Response object
+        endpoint: The API endpoint that was called
+    """
+
+    def __init__(self, message: str, status_code: int, response: requests.Response, endpoint: str):
+        self.status_code = status_code
+        self.response = response
+        self.endpoint = endpoint
+        super().__init__(message)
+
+
+class CodaAuthenticationError(CodaAPIError):
+    """401 Unauthorized - Invalid or expired API token."""
+    pass
+
+
+class CodaForbiddenError(CodaAPIError):
+    """403 Forbidden - Insufficient permissions for this resource."""
+    pass
+
+
+class CodaBadRequestError(CodaAPIError):
+    """400 Bad Request - Invalid request payload or parameters."""
+    pass
+
+
+class CodaNotFoundError(CodaAPIError):
+    """404 Not Found - Resource does not exist."""
+    pass
+
+
+class CodaClientError(CodaAPIError):
+    """4XX Client Error (other than 400, 401, 403, 404)."""
+    pass
+
+
+class CodaServerError(CodaAPIError):
+    """5XX Server Error - Coda API server error."""
+    pass
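Because every exception above carries the status code, endpoint and raw response, callers can branch on the subclass and still inspect the details; a minimal hypothetical handler (the try body is a placeholder for any SDK call that reaches the Coda API):

from coda.sdk import CodaAPIError, CodaAuthenticationError

try:
    ...  # placeholder for any SDK call that reaches the Coda API
except CodaAuthenticationError:
    ...  # e.g. re-read CODA_API_TOKEN and retry
except CodaAPIError as err:
    # The base class catches the remaining 4XX/5XX (and unexpected) statuses.
    print(err.status_code, err.endpoint)
    print(err.response.text[:200])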
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/job.py
RENAMED
@@ -10,6 +10,7 @@ from coda.sdk.enums import Format, FrameRate, Language, VenueType
 from .constants import DEFAULT_PROGRAM_ID
 from .essence import Essence
 from .utils import validate_group_id, make_request
+from .exceptions import CodaServerError, CodaClientError
 from ..tc_tools import tc_to_time_seconds
 
 if TYPE_CHECKING:
@@ -73,25 +74,114 @@ class JobPayloadBuilder:
         return self
 
     def with_input_timing(
-        self,
+        self,
+        frame_rate: FrameRate | None = None,
+        ffoa: str | None = None,
+        lfoa: str | None = None,
+        start_time: str | None = None,
+        head_leader_length: int | None = None,
+        tail_leader_length: int | None = None
     ) -> "JobPayloadBuilder":
         """Set the input timing information for the source files.
 
+        Note: This will override the timing info for every defined essence.
+
+        Timing parameters come in two mutually exclusive groups. When using either group,
+        ALL parameters in that group must be provided:
+        - Timecode-based: frame_rate, ffoa, lfoa, start_time (all four required together)
+        - Offset-based: frame_rate, head_leader_length, tail_leader_length (all three required together)
+
         Args:
-            frame_rate (FrameRate, optional): The frame rate enum.
-            ffoa (str, optional): The first frame of
-            lfoa (str, optional): The last frame of
-            start_time (str, optional): The start
+            frame_rate (FrameRate, optional): The frame rate enum. Required with all timing groups.
+            ffoa (str, optional): The first frame of action timecode. Defaults to None.
+            lfoa (str, optional): The last frame of action timecode. Defaults to None.
+            start_time (str, optional): The file start timecode. When provided with frame_rate,
+                also used to calculate bext_time_reference for resources. Defaults to None.
+            head_leader_length (int, optional): Head leader length in frames. Defaults to None.
+            tail_leader_length (int, optional): Tail leader length in frames. Defaults to None.
 
         Returns:
             JobPayloadBuilder: The builder instance for fluent chaining.
 
+        Raises:
+            ValueError: If both timecode-based and offset-based parameters are provided.
+            ValueError: If only some parameters from a group are provided.
+
         """
+        # Define parameter groups (excluding frame_rate from the "other params" check)
+        timecode_other_params = {
+            "ffoa": ffoa,
+            "lfoa": lfoa,
+            "start_time": start_time
+        }
+
+        offset_other_params = {
+            "head_leader_length": head_leader_length,
+            "tail_leader_length": tail_leader_length
+        }
+
+        timecode_other_set = [k for k, v in timecode_other_params.items() if v is not None]
+        offset_other_set = [k for k, v in offset_other_params.items() if v is not None]
+
+        # Check mutual exclusivity between timecode and offset groups
+        if timecode_other_set and offset_other_set:
+            raise ValueError(
+                "Timecode-based parameters (frame_rate, ffoa, lfoa, start_time) "
+                "and offset-based parameters (frame_rate, head_leader_length, tail_leader_length) are mutually exclusive. "
+                "Please provide only one type of timing parameter."
+            )
+
+        # Validate timecode group - if any timecode param is provided, all must be provided (including frame_rate)
+        if timecode_other_set:
+            full_timecode_params = {
+                "frame_rate": frame_rate,
+                "ffoa": ffoa,
+                "lfoa": lfoa,
+                "start_time": start_time
+            }
+            complete_timecode_set = [k for k, v in full_timecode_params.items() if v is not None]
+            if len(complete_timecode_set) != len(full_timecode_params):
+                missing = [k for k, v in full_timecode_params.items() if v is None]
+                raise ValueError(
+                    f"When using timecode-based parameters, all must be provided. "
+                    f"Missing: {', '.join(missing)}"
+                )
+
+        # Validate offset group - if any offset param is provided, all must be provided (including frame_rate)
+        if offset_other_set:
+            full_offset_params = {
+                "frame_rate": frame_rate,
+                "head_leader_length": head_leader_length,
+                "tail_leader_length": tail_leader_length
+            }
+            complete_offset_set = [k for k, v in full_offset_params.items() if v is not None]
+            if len(complete_offset_set) != len(full_offset_params):
+                missing = [k for k, v in full_offset_params.items() if v is None]
+                raise ValueError(
+                    f"When using offset-based parameters, all must be provided. "
+                    f"Missing: {', '.join(missing)}"
+                )
+
+        # Check if frame_rate is provided alone (without any other timing params)
+        if frame_rate is not None and not timecode_other_set and not offset_other_set:
+            raise ValueError(
+                "frame_rate cannot be used alone. It must be provided as part of either: "
+                "timecode-based parameters (frame_rate, ffoa, lfoa, start_time) or "
+                "offset-based parameters (frame_rate, head_leader_length, tail_leader_length)."
+            )
+
         self._time_options["frame_rate"] = frame_rate
         self._time_options["ffoa"] = ffoa
         self._time_options["lfoa"] = lfoa
         if start_time is not None:
-            self._time_options["
+            self._time_options["file_start_timecode"] = start_time
+            if frame_rate is not None:
+                self._time_options["start_time"] = tc_to_time_seconds(start_time, frame_rate)
+
+        if head_leader_length is not None:
+            self._time_options["head_leader_length"] = head_leader_length
+            self._time_options["tail_leader_length"] = tail_leader_length
+
         return self
 
     def with_essences(self, essences: List[Essence]) -> "JobPayloadBuilder":
@@ -357,16 +447,34 @@ class JobPayloadBuilder:
         if not self._venue:
             raise ValueError("Cannot build job payload: A venue must be set.")
 
-
-
-
-
-
-
-
-
-
-
+        # Apply timing overrides to all essences BEFORE serialization
+        for essence in self._essences:
+            # Apply frame rate if set (can be independent of other params)
+            if self._time_options.get("frame_rate"):
+                frame_rate = self._time_options["frame_rate"]
+                essence.payload["definition"]["frame_rate"] = frame_rate.value if hasattr(frame_rate, 'value') else frame_rate
+
+            # Apply timecode-based params (mutually exclusive with offset)
+            if self._time_options.get("ffoa"):
+                # Remove offset params if they exist
+                essence.payload["definition"].pop("head_leader_length", None)
+                essence.payload["definition"].pop("tail_leader_length", None)
+
+                # Set timecode params
+                essence.payload["definition"]["ffoa_timecode"] = self._time_options["ffoa"]
+                essence.payload["definition"]["lfoa_timecode"] = self._time_options["lfoa"]
+                essence.payload["definition"]["file_start_timecode"] = self._time_options["file_start_timecode"]
+
+            # Apply offset-based params (mutually exclusive with timecode)
+            elif self._time_options.get("head_leader_length") is not None:
+                # Remove timecode params if they exist
+                essence.payload["definition"].pop("ffoa_timecode", None)
+                essence.payload["definition"].pop("lfoa_timecode", None)
+                essence.payload["definition"].pop("file_start_timecode", None)
+
+                # Set offset params
+                essence.payload["definition"]["head_leader_length"] = self._time_options["head_leader_length"]
+                essence.payload["definition"]["tail_leader_length"] = self._time_options["tail_leader_length"]
 
         sources = [e.dict() for e in self._essences]
 
@@ -388,13 +496,6 @@ class JobPayloadBuilder:
             "sources": sources,
         }
 
-        if fr is not None:
-            wf_in["source_frame_rate"] = fr
-        if ffoa is not None:
-            wf_in["ffoa_timecode"] = ffoa
-        if lfoa is not None:
-            wf_in["lfoa_timecode"] = lfoa
-
         if self._edits:
             wf_in["edits"] = self._edits
 
@@ -449,6 +550,9 @@ class Job:
         Returns:
             requests.Response: The validation response object.
 
+        Raises:
+            CodaAPIError: If validation fails (HTTP 4XX or 5XX response).
+
         """
         endpoint = f"/interface/v2/groups/{self.group_id}/jobs/validate?skip_cloud_validation={skip_cloud_validation}"
         return make_request(requests.post, endpoint, self.payload)
@@ -459,15 +563,13 @@ class Job:
         Returns:
             int | None: The job ID if successful, otherwise None.
 
+        Raises:
+            CodaAPIError: If validation or job execution fails (HTTP 4XX or 5XX response).
+
         """
         print("Validating job payload.", file=sys.stderr)
         validation_result = self.validate()
 
-        if validation_result.status_code != 200:
-            print("Job validation failed. Cannot run job.", file=sys.stderr)
-            print(validation_result.json(), file=sys.stderr)
-            return None
-
         print("Launching job.", file=sys.stderr)
         endpoint = f"/interface/v2/groups/{self.group_id}/jobs"
         response = make_request(requests.post, endpoint, self.payload)
@@ -490,20 +592,14 @@ class Job:
             dict: The coda edge payload.
 
         Raises:
-
+            CodaAPIError: If job validation or edge payload retrieval fails.
 
         """
         validation_result = self.validate(skip_cloud_validation=skip_cloud_validation)
 
-        if validation_result.status_code != 200:
-            raise RuntimeError(f"Edge job validation failed. \nStatus: {validation_result.status_code}\n Resp: {validation_result.json()}")
-
         endpoint = f"/interface/v2/groups/{self.group_id}/edge?skip_cloud_validation={skip_cloud_validation}"
         response = make_request(requests.post, endpoint, self.payload)
 
-        if response.status_code != 200:
-            raise RuntimeError(f"Edge payload retrieval failed with status code: {response.json()}")
-
         try:
             edge_payload = response.json()
         except Exception as err:
@@ -524,6 +620,9 @@ class Job:
         Returns:
             requests.Response: The raw payload validation response object.
 
+        Raises:
+            CodaAPIError: If validation fails (HTTP 4XX or 5XX response).
+
         """
         group_id = validate_group_id()
         endpoint = f"/interface/v2/groups/{group_id}/jobs/validate"
@@ -541,14 +640,13 @@ class Job:
         Returns:
             int | None: The job ID if successful, otherwise None.
 
+        Raises:
+            CodaAPIError: If validation or job execution fails (HTTP 4XX or 5XX response).
+
         """
         group_id = validate_group_id()
 
         validation_result = Job.validate_raw_payload(json_payload)
-        if validation_result.status_code != 200:
-            print("Raw payload validation failed. Cannot run job.", file=sys.stderr)
-            print(validation_result.json(), file=sys.stderr)
-            return None
 
         endpoint = f"/interface/v2/groups/{group_id}/jobs"
         response = make_request(requests.post, endpoint, json_payload)
@@ -565,7 +663,8 @@ class Job:
         """Get the status of a job.
 
         This method polls the API for the job's status and will retry up to 3 times
-        if
+        if a server or client error (5XX or certain 4XX responses) is encountered.
+        Returns None if all retries are exhausted.
 
         Args:
             job_id (int): The ID of the job.
@@ -573,24 +672,30 @@ class Job:
         Returns:
             dict | None: The job status and progress if successful, otherwise None.
 
+        Raises:
+            CodaAuthenticationError: If API returns 401 (unauthorized).
+            CodaForbiddenError: If API returns 403 (insufficient permissions).
+            CodaNotFoundError: If API returns 404 (job not found).
+
         """
         group_id = validate_group_id()
-        ret = make_request(
-            requests.get, f"/interface/v2/groups/{group_id}/jobs/{job_id}"
-        )
-        j = ret.json()
         error_count = 0
-
-
-
-
-
-
-
-
-
-
-
+        max_retries = 3
+
+        while error_count < max_retries:
+            try:
+                ret = make_request(
+                    requests.get, f"/interface/v2/groups/{group_id}/jobs/{job_id}"
+                )
+                j = ret.json()
+                return {"status": j["status"], "progress": j["progress"]}
+            except (CodaServerError, CodaClientError) as e:
+                error_count += 1
+                if error_count >= max_retries:
+                    print(f"error in get_status (attempt {error_count}): {e}", file=sys.stderr)
+                    return None
+                print(f"error in get_status (attempt {error_count}): {e}", file=sys.stderr)
+                time.sleep(1)
 
     @staticmethod
     def get_report(job_id: int) -> dict:
@@ -602,6 +707,9 @@ class Job:
         Returns:
             dict: The job report JSON.
 
+        Raises:
+            CodaAPIError: If report retrieval fails (HTTP 4XX or 5XX response).
+
         """
         ret = make_request(requests.get, f"/interface/v2/report/{job_id}/raw")
         return ret.json()
@@ -617,6 +725,10 @@ class Job:
         Returns:
             list: List of jobs within the date range.
 
+        Raises:
+            CodaAPIError: If query fails (HTTP 4XX or 5XX response).
+
         """
-
+        group_id = validate_group_id()
+        ret = make_request(requests.get, f"/interface/v2/groups/{group_id}/jobs?sort=asc&start_date={start_date}&end_date={end_date}")
         return ret.json()
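A minimal sketch of the extended with_input_timing call with hypothetical values (the builder constructor signature, the FPS_24 member name, and the point at which build() applies the overrides are assumptions based on the hunks above):

from coda.sdk import FrameRate
from coda.sdk.job import JobPayloadBuilder  # module path shown in this diff

offset_builder = JobPayloadBuilder()  # assumed constructor signature
offset_builder.with_input_timing(
    frame_rate=FrameRate.FPS_24,      # assumed enum member name
    head_leader_length=192,
    tail_leader_length=48,
)

timecode_builder = JobPayloadBuilder()
timecode_builder.with_input_timing(
    frame_rate=FrameRate.FPS_24,
    ffoa="01:00:08:00",
    lfoa="01:30:00:00",
    start_time="00:59:52:00",
)

# Mixing the two groups, or passing frame_rate alone, raises ValueError;
# the chosen values are copied into every essence payload when the job payload is built.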
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/preset.py
RENAMED
@@ -137,10 +137,7 @@ class Preset:
 
         route = Preset.routes[preset_type].replace(":group_id", str(group_id))
         ret = make_request(requests.get, f"/interface/v2/{route}")
-
-        if "error" in j:
-            raise ValueError(f"Unable to find preset '{preset_type}': {j}")
-        return j
+        return ret.json()
 
     @staticmethod
     def get_group_id_by_name(group_name: str) -> str:
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/utils.py
RENAMED
@@ -3,7 +3,7 @@ import requests
 import re
 import urllib3
 
-from typing import TYPE_CHECKING, List, Dict, Any, Callable
+from typing import TYPE_CHECKING, List, Dict, Any, Callable, cast
 
 from .constants import (
     ENV_CODA_API_GROUP_ID,
@@ -13,6 +13,15 @@ from .constants import (
     DEFAULT_API_URL,
     INSECURE_SKIP_VERIFY_VALUES,
 )
+from .exceptions import (
+    CodaAPIError,
+    CodaAuthenticationError,
+    CodaForbiddenError,
+    CodaBadRequestError,
+    CodaNotFoundError,
+    CodaClientError,
+    CodaServerError,
+)
 
 if TYPE_CHECKING:
     from ..tc_tools import (
@@ -81,6 +90,7 @@ def user_info() -> str:
     return ret.json()
 
 
+
 def validate_group_id() -> str:
     """Get the Coda Group ID from environment variables.
 
@@ -113,6 +123,8 @@ def make_request(
     headers, and executes the request using the provided function (e.g.,
     requests.get, requests.post).
 
+    HTTP errors are automatically detected and converted to informative exceptions.
+
     Args:
         func (Callable[..., requests.Response]): The requests function to call
             (e.g., requests.get, requests.post, requests.put).
@@ -122,9 +134,17 @@ def make_request(
 
     Raises:
         ValueError: If the 'CODA_API_TOKEN' environment variable is not set.
+        CodaAuthenticationError: If the API returns 401 Unauthorized.
+        CodaForbiddenError: If the API returns 403 Forbidden.
+        CodaBadRequestError: If the API returns 400 Bad Request.
+        CodaNotFoundError: If the API returns 404 Not Found.
+        CodaClientError: If the API returns other 4XX errors.
+        CodaServerError: If the API returns 5XX server errors.
+        CodaAPIError: If the API returns an unexpected status code (e.g., 3XX).
 
     Returns:
         requests.Response: The Response object from the `requests` library.
+            Only returned for successful 2XX status codes.
 
     """
     url = os.getenv(ENV_CODA_API_URL, DEFAULT_API_URL)
@@ -136,7 +156,12 @@ def make_request(
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
            verify = False
        auth = {"Authorization": f"Bearer {token}"}
-
+        response = func(url, json=payload, headers=auth, verify=verify)
+
+        # Check HTTP status and raise appropriate exception for errors
+        _check_response_status(response, route)
+
+        return response
    raise ValueError("Error: CODA_API_TOKEN is not set.")
 
 
@@ -280,3 +305,133 @@ def is_key_value_comma_string(s: str) -> bool:
    pattern = r"^([A-Z0-9_]+=[a-zA-Z0-9_-]+)(,[A-Z0-9_]+=[a-zA-Z0-9_-]+)*$"
 
    return re.fullmatch(pattern, s) is not None
+
+
+def _extract_error_detail(response: requests.Response) -> str:
+    """Extract error detail from response body.
+
+    Tries multiple strategies to get meaningful error info:
+    1. Check for 'error' key in JSON
+    2. Check for 'message' key in JSON
+    3. Check for 'errors' array in JSON
+    4. Fall back to response.text (truncated)
+
+    Args:
+        response: The requests Response object
+
+    Returns:
+        str: Error detail string, or empty string if none found
+    """
+    try:
+        response_json = response.json()
+        if isinstance(response_json, dict):
+            # Check common error keys
+            if "error" in response_json:
+                error = response_json["error"]
+                # Handle nested error objects
+                if isinstance(error, dict) and "message" in error:
+                    return error["message"]
+                return str(error)
+            elif "message" in response_json:
+                return response_json["message"]
+            elif "errors" in response_json:
+                errors = response_json["errors"]
+                if isinstance(errors, list) and errors:
+                    return "; ".join(str(e) for e in errors[:3])  # First 3 errors
+                return str(errors)
+    except Exception:
+        pass
+
+    # Fall back to text response, truncated
+    if response.text:
+        text = response.text.strip()
+        if len(text) > 200:
+            return text[:200] + "..."
+        return text
+
+    return ""
+
+
+def _check_response_status(response: requests.Response, endpoint: str) -> None:
+    """Check response status code and raise exception for HTTP errors.
+
+    Raises specific exception types based on the HTTP status code to enable
+    targeted error handling. All exceptions include the status code, endpoint,
+    and extracted error details from the response body.
+
+    Args:
+        response: The requests Response object to check
+        endpoint: The API endpoint being called (for error messages)
+
+    Raises:
+        CodaAuthenticationError: For 401 status
+        CodaForbiddenError: For 403 status
+        CodaBadRequestError: For 400 status
+        CodaNotFoundError: For 404 status
+        CodaClientError: For other 4XX statuses
+        CodaServerError: For 5XX statuses
+    """
+    status_code: int = cast(int, response.status_code)
+
+    # Success - no exception needed
+    if 200 <= status_code < 300:
+        return
+
+    error_detail = _extract_error_detail(response)
+    base_message = f"HTTP {status_code} error for endpoint '{endpoint}'"
+    if error_detail:
+        full_message = f"{base_message}: {error_detail}"
+    else:
+        full_message = base_message
+
+    # Raise specific exception based on status code
+    if status_code == 401:
+        raise CodaAuthenticationError(
+            full_message,
+            status_code=status_code,
+            response=response,
+            endpoint=endpoint
+        )
+    elif status_code == 403:
+        raise CodaForbiddenError(
+            full_message,
+            status_code=status_code,
+            response=response,
+            endpoint=endpoint
+        )
+    elif status_code == 400:
+        raise CodaBadRequestError(
+            full_message,
+            status_code=status_code,
+            response=response,
+            endpoint=endpoint
+        )
+    elif status_code == 404:
+        raise CodaNotFoundError(
+            full_message,
+            status_code=status_code,
+            response=response,
+            endpoint=endpoint
+        )
+    elif 400 <= status_code < 500:
+        raise CodaClientError(
+            full_message,
+            status_code=status_code,
+            response=response,
+            endpoint=endpoint
+        )
+    elif 500 <= status_code < 600:
+        raise CodaServerError(
+            full_message,
+            status_code=status_code,
+            response=response,
+            endpoint=endpoint
+        )
+    else:
+        # Unexpected status code (3XX, 1XX, etc.)
+        raise CodaAPIError(
+            full_message,
+            status_code=status_code,
+            response=response,
+            endpoint=endpoint
+        )
+
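The status-code-to-exception mapping implemented by _check_response_status can be summarised in a small standalone sketch (illustration only; class names are returned as strings so it runs without the SDK installed):

# Hypothetical illustration of the mapping implemented by _check_response_status.
MAPPING = {
    400: "CodaBadRequestError",
    401: "CodaAuthenticationError",
    403: "CodaForbiddenError",
    404: "CodaNotFoundError",
}

def exception_name_for(status_code: int) -> str | None:
    if 200 <= status_code < 300:
        return None                      # success: make_request returns the response
    if status_code in MAPPING:
        return MAPPING[status_code]
    if 400 <= status_code < 500:
        return "CodaClientError"         # other 4XX
    if 500 <= status_code < 600:
        return "CodaServerError"         # 5XX
    return "CodaAPIError"                # anything unexpected (1XX/3XX)

assert exception_name_for(204) is None
assert exception_name_for(418) == "CodaClientError"
assert exception_name_for(503) == "CodaServerError"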
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/workflow.py
RENAMED
@@ -180,7 +180,7 @@ class WorkflowDefinitionBuilder:
 
         if isinstance(loudness_preset, dict) and "tolerances" not in loudness_preset:
             loudness_preset["tolerances"] = _DEFAULT_LOUDNESS_TOLERANCES.copy()
-        process_block_config = {
+        process_block_config: Dict[str, Any] = {
             "name": name,
             "input_filter": input_filter,
             "output_settings": {
@@ -1214,7 +1214,7 @@ class WorkflowDefinitionBuilder:
             WorkflowDefinition: A new Workflow instance containing the built definition.
 
         """
-        definition = {
+        definition: Dict[str, Any] = {
             "name": self._name,
             "process_blocks": copy.deepcopy(self._process_blocks),
             "packages": copy.deepcopy(self._packages),

{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/LICENSE
RENAMED
File without changes
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/PYPI_README.md
RENAMED
File without changes
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/constants.py
RENAMED
File without changes
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk/enums.py
RENAMED
File without changes
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/sdk.py
RENAMED
File without changes
{sprocket_systems_coda_sdk-2.0.10 → sprocket_systems_coda_sdk-2.0.12}/src/coda/tc_tools.py
RENAMED
File without changes