sprocket-systems.coda.sdk 1.3.2__py3-none-any.whl → 2.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
coda/sdk/job.py ADDED
@@ -0,0 +1,582 @@
1
+ """Job and JobPayloadBuilder modules for creating and managing Coda jobs."""
2
+
3
+ import copy
4
+ import requests
5
+ import sys
6
+ import time
7
+
8
+ from typing import TYPE_CHECKING, List, Dict, Any
9
+ from coda.sdk.enums import Format, FrameRate, Language, VenueType
10
+ from .constants import DEFAULT_PROGRAM_ID
11
+ from .essence import Essence
12
+ from .utils import validate_group_id, make_request
13
+ from ..tc_tools import tc_to_time_seconds
14
+
15
+ if TYPE_CHECKING:
16
+ from .workflow import WorkflowDefinition
17
+
18
+
19
class JobPayloadBuilder:
    """Fluent builder that assembles a Coda job payload.

    Collects the job name, venue, language, input essences, timing
    options, workflow definition and optional extras through the
    ``with_*()`` methods, then produces the final payload dictionary
    via :meth:`build`.
    """

    def __init__(self, name: str) -> None:
        """Initialize the builder with a job name.

        Args:
            name (str): The name of the job, used for identification.

        Raises:
            ValueError: If the provided name is empty or not a string.

        """
        if not name or not isinstance(name, str):
            raise ValueError("Job name must be a non-empty string.")

        self._name: str = name
        self._group_id: str = validate_group_id()
        # Store the enum *values* so the defaults serialize identically to
        # values set later through with_venue()/with_language(), which
        # also store .value (a raw Enum member would not JSON-serialize).
        self._venue: str = VenueType.NEARFIELD.value
        self._language: str = Language.UNDETERMINED.value
        self._essences: List[Essence] = []
        self._time_options: Dict = {}
        self._workflow: WorkflowDefinition | None = None
        self._edits: Dict | None = None
        self._reference_run: int | None = None

    def with_venue(self, venue: VenueType) -> "JobPayloadBuilder":
        """Set the input venue for the job.

        Args:
            venue (VenueType): The input venue enum.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        """
        self._venue = venue.value
        return self

    def with_language(self, language: Language) -> "JobPayloadBuilder":
        """Set the output language for the job.

        Args:
            language (Language): The output language enum.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        """
        self._language = language.value
        return self

    def with_input_timing(
        self, frame_rate: FrameRate | None = None, ffoa: str | None = None, lfoa: str | None = None, start_time: str | None = None
    ) -> "JobPayloadBuilder":
        """Set the input timing information for the source files.

        Args:
            frame_rate (FrameRate, optional): The frame rate enum. Defaults to None.
            ffoa (str, optional): The first frame of audio timecode. Defaults to None.
            lfoa (str, optional): The last frame of audio timecode. Defaults to None.
            start_time (str, optional): The start time in timecode format. Defaults to None.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        """
        # NOTE(review): frame_rate is stored as the enum member (not
        # .value); build() forwards it as-is into "source_frame_rate".
        # Confirm downstream consumers accept the enum form.
        self._time_options["frame_rate"] = frame_rate
        self._time_options["ffoa"] = ffoa
        self._time_options["lfoa"] = lfoa
        if start_time is not None:
            # Convert the timecode string to seconds once, at set time.
            self._time_options["start_time"] = tc_to_time_seconds(start_time, frame_rate)
        return self

    def with_essences(self, essences: List[Essence]) -> "JobPayloadBuilder":
        """Add a list of Essence objects to the job's inputs.

        Args:
            essences (List[Essence]): A list of Essence objects.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        Raises:
            TypeError: If input is not a list or if any item in the list
                is not an Essence object.

        """
        if not isinstance(essences, list):
            raise TypeError("Essences must be provided as a list.")

        for essence in essences:
            if not isinstance(essence, Essence):
                # Error message corrected: the class is Essence, not
                # "CodaEssence".
                raise TypeError(f"All items in list must be Essence objects, but found an object of type {type(essence).__name__}.")

        self._essences.extend(essences)

        return self

    def with_workflow(self, workflow: "WorkflowDefinition") -> "JobPayloadBuilder":
        """Set the WorkflowDefinition for the job.

        Args:
            workflow (WorkflowDefinition): The WorkflowDefinition object.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        Raises:
            TypeError: If the provided workflow is not a valid
                WorkflowDefinition object.

        """
        # Duck-typed class-name check: WorkflowDefinition is only imported
        # under TYPE_CHECKING (circular import), so isinstance() is not
        # available here.
        if not hasattr(workflow, "__class__") or workflow.__class__.__name__ != "WorkflowDefinition":
            raise TypeError(f"Workflow must be a valid Workflow object, but received type {type(workflow).__name__}.")

        # Deep-copy so later mutations of the caller's object do not leak
        # into the built payload.
        self._workflow = copy.deepcopy(workflow)
        return self

    def with_edits(self, edits: dict) -> "JobPayloadBuilder":
        """Add an edit payload for reel splitting.

        Args:
            edits (dict): The reel edit payload dictionary.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        Raises:
            TypeError: If the provided edits is not a dictionary.

        Example:
            edit_payload = {
                "reel_splitting": {
                    "leader_integer_seconds": 8,
                    "prelap_integer_seconds": 2,
                    "overlap_integer_seconds": 2,
                    "tail_leader_integer_seconds": 2,
                    "reel_pops": True
                },
                "reels": [
                    {
                        "source_start_time": "01:00:00:00",
                        "dest_start_time": "00:00:00:00"
                    },
                    {
                        "source_start_time": "01:00:07:00",
                        "dest_start_time": "00:00:00:00"
                    }
                ]
            }

        """
        if not isinstance(edits, dict):
            raise TypeError("Edits must be provided as a dictionary.")

        self._edits = edits
        return self

    def with_reference_job(self, job_id: int) -> "JobPayloadBuilder":
        """Set a parent job to use for cache referencing.

        Args:
            job_id (int): The ID of the reference job.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        Raises:
            TypeError: If the provided job_id is not an integer.

        """
        # Reject bool explicitly: isinstance(True, int) is True, but a
        # boolean is never a valid job ID.
        if isinstance(job_id, bool) or not isinstance(job_id, int):
            raise TypeError("Reference job ID must be an integer.")

        self._reference_run = job_id
        return self

    def with_forced_imax5(self) -> "JobPayloadBuilder":
        """Force all essences to the 'imax5' format, if compatible.

        Verifies that all added essences have a compatible format ('5.0'
        or 'imax5') and then updates them in place.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        Raises:
            ValueError: If any essence has an incompatible format.

        """
        incompatible_formats = [
            essence.payload["definition"]["format"]
            for essence in self._essences
            if essence.payload["definition"]["format"] not in ["5.0", "imax5"]
        ]

        if incompatible_formats:
            raise ValueError(
                f"Cannot force imax5. Incompatible formats found: {', '.join(set(incompatible_formats))}"
            )

        for essence in self._essences:
            essence.payload["definition"]["format"] = "imax5"

        return self

    def with_input_language(self, language: Language) -> "JobPayloadBuilder":
        """Set the language for all input essences.

        Args:
            language (Language): The language code.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        """
        for essence in self._essences:
            essence.payload["definition"]["language"] = language.value
        return self

    def with_program_for_type(
        self, type: str, program: str = DEFAULT_PROGRAM_ID
    ) -> "JobPayloadBuilder":
        """Set the program for all essences of a specific type.

        Args:
            type (str): The essence type substring to match. (Parameter
                name shadows the builtin; kept for API compatibility.)
            program (str, optional): The program name. Defaults to
                DEFAULT_PROGRAM_ID.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        """
        for essence in self._essences:
            # Substring match against the essence's declared type.
            if type in essence.payload["definition"]["type"]:
                essence.payload["definition"]["program"] = program
        return self

    def with_program_for_format(
        self, format: Format, program: str = DEFAULT_PROGRAM_ID
    ) -> "JobPayloadBuilder":
        """Set the program for all essences of a specific format.

        Args:
            format (Format): The essence format enum.
            program (str, optional): The program name. Defaults to
                DEFAULT_PROGRAM_ID.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        """
        for essence in self._essences:
            # Compare the enum's value: essence payloads store plain
            # strings (see with_forced_imax5), so comparing the enum
            # member directly would never match for a non-str Enum.
            if format.value == essence.payload["definition"]["format"]:
                essence.payload["definition"]["program"] = program
        return self

    def with_unique_program(self, program: str = DEFAULT_PROGRAM_ID) -> "JobPayloadBuilder":
        """Set the same program for all input essences.

        Args:
            program (str, optional): The program name. Defaults to
                DEFAULT_PROGRAM_ID.

        Returns:
            JobPayloadBuilder: The builder instance for fluent chaining.

        """
        for essence in self._essences:
            essence.payload["definition"]["program"] = program
        return self

    def build(self) -> dict:
        """Assemble and return the final job payload dictionary.

        Returns:
            dict: The assembled job payload dictionary with
                "workflow_input" and "workflow_definition" keys.

        Raises:
            ValueError: If critical components like essences or workflow
                have not been set.

        """
        if not self._essences:
            raise ValueError("Cannot build job payload: At least one essence must be added.")
        if not self._workflow:
            raise ValueError("Cannot build job payload: A workflow must be set.")
        if not self._venue:
            raise ValueError("Cannot build job payload: A venue must be set.")

        ffoa = None
        lfoa = None
        fr = None

        # Derive timing from essences; when several essences carry timing
        # info, the last one in the list wins.
        for e in self._essences:
            essence_timing = e.payload["timing_info"]
            if essence_timing:
                ffoa = essence_timing["ffoa_timecode"]
                lfoa = essence_timing["lfoa_timecode"]
                fr = essence_timing["source_frame_rate"]

        # Explicit with_input_timing() values override essence-derived ones.
        if self._time_options.get("frame_rate"):
            fr = self._time_options.get("frame_rate")
        if self._time_options.get("ffoa"):
            ffoa = self._time_options.get("ffoa")
        if self._time_options.get("lfoa"):
            lfoa = self._time_options.get("lfoa")

        sources = [e.dict() for e in self._essences]

        start_time = self._time_options.get("start_time")
        if start_time is not None:
            # Stamp a BWF time reference (in samples) onto every audio
            # resource that declares a sample rate.
            for source_obj in sources:
                definition = source_obj.get("definition", {})
                if "resources" in definition:
                    for r in definition["resources"]:
                        if "sample_rate" in r:
                            r["bext_time_reference"] = int(start_time * r["sample_rate"])

        wf_in = {
            "project": {
                "title": self._name,
                "language": self._language,
            },
            "venue": self._venue,
            "sources": sources,
            "source_frame_rate": fr,
            "ffoa_timecode": ffoa,
            "lfoa_timecode": lfoa,
        }

        if self._edits:
            wf_in["edits"] = self._edits

        wf_def = copy.deepcopy(self._workflow.dict())

        # Naming conventions live in workflow_input, not in the workflow
        # definition: move them from each package definition into
        # package_data.
        if "packages" in wf_def:
            package_data = {}
            for package_id, pdata in wf_def["packages"].items():
                if "naming_convention" in pdata.get("definition", {}):
                    package_data[package_id] = {
                        "naming_convention": pdata["definition"]["naming_convention"]
                    }
                    del pdata["definition"]["naming_convention"]
            if package_data:
                wf_in["package_data"] = package_data

        payload = {
            "workflow_input": wf_in,
            "workflow_definition": wf_def,
        }

        # "is not None" rather than truthiness so a (theoretical) job ID
        # of 0 is still honored.
        if self._reference_run is not None:
            payload["parent_job_id"] = self._reference_run

        return payload
383
+
384
+
385
class Job:
    """Create and manage Coda Jobs."""

    def __init__(self, payload: dict) -> None:
        """Initialize the CodaJob with a payload.

        Args:
            payload (dict): The job payload dictionary; must contain a
                "workflow_input" key (as produced by JobPayloadBuilder).

        Raises:
            ValueError: If payload is missing or invalid.

        """
        if not payload or "workflow_input" not in payload:
            raise ValueError("Cannot initialize CodaJob with an invalid payload.")
        self.payload = payload
        self.group_id = validate_group_id()

    def validate(self, skip_cloud_validation: bool = True) -> requests.Response:
        """Validate the job payload against the Coda API.

        Args:
            skip_cloud_validation (bool, optional): Whether to skip cloud
                validation. Defaults to True.

        Returns:
            requests.Response: The validation response object.

        """
        endpoint = f"/interface/v2/groups/{self.group_id}/jobs/validate?skip_cloud_validation={skip_cloud_validation}"
        return make_request(requests.post, endpoint, self.payload)

    @staticmethod
    def _launch(group_id: str, payload: dict) -> int | None:
        """Post *payload* to the jobs endpoint and return the new job ID.

        Shared launch logic for run() and run_raw_payload().

        Args:
            group_id (str): The group to launch the job under.
            payload (dict): The (already validated) job payload.

        Returns:
            int | None: The job ID, or None when the API reports errors
                or omits "job_id".

        """
        endpoint = f"/interface/v2/groups/{group_id}/jobs"
        response = make_request(requests.post, endpoint, payload)
        response_json = response.json()

        print(response_json, file=sys.stderr)
        if "errors" in response_json or "job_id" not in response_json:
            return None

        return int(response_json["job_id"])

    def run(self) -> int | None:
        """Validate the payload against the Coda API and run the job.

        Returns:
            int | None: The job ID if successful, otherwise None.

        """
        print("Validating job payload.", file=sys.stderr)
        validation_result = self.validate()

        if validation_result.status_code != 200:
            print("Job validation failed. Cannot run job.", file=sys.stderr)
            print(validation_result.json(), file=sys.stderr)
            return None

        print("Launching job.", file=sys.stderr)
        return self._launch(self.group_id, self.payload)

    def get_edge_payload(self, skip_cloud_validation: bool = True) -> dict:
        """Get the raw conductor graph for a coda edge job.

        Args:
            skip_cloud_validation (bool, optional): Whether to skip cloud
                validation. Defaults to True.

        Returns:
            dict: The coda edge payload.

        Raises:
            RuntimeError: If job validation fails or edge payload
                retrieval fails.

        """
        validation_result = self.validate(skip_cloud_validation=skip_cloud_validation)

        if validation_result.status_code != 200:
            raise RuntimeError(f"Edge job validation failed. \nStatus: {validation_result.status_code}\n Resp: {validation_result.json()}")

        endpoint = f"/interface/v2/groups/{self.group_id}/edge?skip_cloud_validation={skip_cloud_validation}"
        response = make_request(requests.post, endpoint, self.payload)

        if response.status_code != 200:
            # Include the actual status code (the message previously only
            # showed the response body).
            raise RuntimeError(f"Edge payload retrieval failed with status code {response.status_code}: {response.json()}")

        try:
            edge_payload = response.json()
        except ValueError as err:
            # JSON decoding errors (including requests.JSONDecodeError)
            # are ValueError subclasses; avoid masking unrelated bugs
            # with a broad except.
            raise RuntimeError(f"Error parsing edge payload response: {err}") from err

        if "errors" in edge_payload:
            raise RuntimeError(f"Edge payload retrieval failed with errors: {edge_payload['errors']}")

        return edge_payload

    @staticmethod
    def validate_raw_payload(json_payload: dict) -> requests.Response:
        """Validate a raw JSON payload dictionary against the Coda API.

        Args:
            json_payload (dict): The raw JSON payload dictionary.

        Returns:
            requests.Response: The raw payload validation response object.

        """
        group_id = validate_group_id()
        endpoint = f"/interface/v2/groups/{group_id}/jobs/validate"
        response = make_request(requests.post, endpoint, json_payload)
        print("validate raw: ", response.json(), file=sys.stderr)
        return response

    @staticmethod
    def run_raw_payload(json_payload: dict) -> int | None:
        """Validate and run a job from a raw JSON payload dictionary.

        Args:
            json_payload (dict): The raw JSON payload dictionary.

        Returns:
            int | None: The job ID if successful, otherwise None.

        """
        group_id = validate_group_id()

        validation_result = Job.validate_raw_payload(json_payload)
        if validation_result.status_code != 200:
            print("Raw payload validation failed. Cannot run job.", file=sys.stderr)
            print(validation_result.json(), file=sys.stderr)
            return None

        return Job._launch(group_id, json_payload)

    @staticmethod
    def get_status(job_id: int) -> Dict[str, Any] | None:
        """Get the status of a job.

        Polls the API for the job's status, retrying up to 3 times (with
        a 1-second pause) when the response carries an "error" key.

        Args:
            job_id (int): The ID of the job.

        Returns:
            dict | None: The job status and progress if successful,
                otherwise None.

        """
        group_id = validate_group_id()
        # Hoist the endpoint so the initial attempt and retries share it.
        endpoint = f"/interface/v2/groups/{group_id}/jobs/{job_id}"

        ret = make_request(requests.get, endpoint)
        j = ret.json()
        error_count = 0
        while "error" in j and error_count < 3:
            print("error in get_status: ", ret.status_code, j["error"], file=sys.stderr)
            time.sleep(1)
            ret = make_request(requests.get, endpoint)
            j = ret.json()
            error_count += 1
        if "error" in j:
            return None
        return {"status": j["status"], "progress": j["progress"]}

    @staticmethod
    def get_report(job_id: int) -> dict:
        """Get the report of a job.

        Args:
            job_id (int): The ID of the job.

        Returns:
            dict: The job report JSON.

        """
        ret = make_request(requests.get, f"/interface/v2/report/{job_id}/raw")
        return ret.json()

    @staticmethod
    def get_jobs_by_date(start_date: str, end_date: str) -> list:
        """Get jobs within a date range.

        Args:
            start_date (str): Start date for the query.
            end_date (str): End date for the query.

        Returns:
            list: List of jobs within the date range.

        """
        ret = make_request(requests.get, f"/interface/v1/jobs?sort=asc&start_date={start_date}&end_date={end_date}")
        return ret.json()