pinexq-client 0.9.2.20250908.48-py3-none-any.whl → 0.10.3rc1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  # generated by datamodel-codegen:
  #   filename:  openapi.json
- #   timestamp: 2025-09-02T12:55:12+00:00
+ #   timestamp: 2025-12-01T17:37:49+00:00

  from __future__ import annotations

@@ -43,6 +43,18 @@ class AdminWorkDataQueryResultHtoOpenApiProperties(BaseModel):
      remaining_tags: List[str] | None = Field(None, alias='RemainingTags')


+ class AssignCodeHashParameters(BaseModel):
+     model_config = ConfigDict(
+         extra='allow',
+         populate_by_name=True,
+     )
+     code_hash: constr(min_length=1) = Field(
+         ...,
+         alias='CodeHash',
+         description='The code hash of the processing to be deployed',
+     )
+
+
  class CopyPsFromOrgToUserActionParameters(BaseModel):
      model_config = ConfigDict(
          extra='allow',
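
A minimal sketch of instantiating the new action-parameter model; the hash value below is a hypothetical placeholder, not a real digest. Because the model sets `populate_by_name=True`, the snake_case field name should validate alongside the PascalCase alias:

```python
# Sketch only: the hash is an illustrative placeholder.
params = AssignCodeHashParameters(CodeHash="sha256:0123abcd")
same = AssignCodeHashParameters(code_hash="sha256:0123abcd")  # snake_case also accepted
assert params.code_hash == same.code_hash
```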
@@ -117,6 +129,30 @@ class DataSpecificationHto(BaseModel):
      max_slots: int | None = Field(None, alias='MaxSlots')


+ class DeploymentResourcePresets(Enum):
+     small = 'Small'
+     medium = 'Medium'
+     large = 'Large'
+     x_large = 'XLarge'
+
+
+ class DeploymentStates(Enum):
+     undefined = 'Undefined'
+     not_deployed = 'NotDeployed'
+     platform = 'Platform'
+     external = 'External'
+     platform_suspended = 'PlatformSuspended'
+     external_suspended = 'ExternalSuspended'
+
+
+ class DeprecatePsActionParameters(BaseModel):
+     model_config = ConfigDict(
+         extra='allow',
+         populate_by_name=True,
+     )
+     reason: str | None = Field(None, alias='Reason')
+
+
  class EditProcessingStepParameters(BaseModel):
      model_config = ConfigDict(
          extra='allow',
@@ -278,52 +314,9 @@ class ProcessingStepFilterParameter(BaseModel):
      tags_by_or: List[str] | None = Field(None, alias='TagsByOr')
      is_public: bool | None = Field(None, alias='IsPublic')
      show_hidden: bool | None = Field(None, alias='ShowHidden')
+     show_deprecated: bool | None = Field(None, alias='ShowDeprecated')
      is_configured: bool | None = Field(None, alias='IsConfigured')
-
-
- class ProcessingStepHtoOpenApiProperties(BaseModel):
-     model_config = ConfigDict(
-         extra='allow',
-         populate_by_name=True,
-     )
-     title: str | None = Field(None, alias='Title')
-     owner_id: str | None = Field(
-         None, alias='OwnerId', description='The owner of this resource'
-     )
-     created_by: str | None = Field(
-         None, alias='CreatedBy', description='The creator of this resource'
-     )
-     version: str | None = Field(
-         None, alias='Version', description='Version of the algorithm. Default = "0"'
-     )
-     function_name: str | None = Field(
-         None,
-         alias='FunctionName',
-         description='Unique name (possibly human readable) for the function so that it can be identified',
-     )
-     short_description: str | None = Field(
-         None, alias='ShortDescription', description='Human readable short description'
-     )
-     long_description: str | None = Field(
-         None, alias='LongDescription', description='Human readable long description'
-     )
-     has_parameters: bool | None = Field(None, alias='HasParameters')
-     is_public: bool | None = Field(None, alias='IsPublic')
-     tags: List[str] | None = Field(None, alias='Tags')
-     is_configured: bool | None = Field(None, alias='IsConfigured')
-     hidden: bool | None = Field(None, alias='Hidden')
-     created_at: AwareDatetime | None = Field(None, alias='CreatedAt')
-     last_modified_at: AwareDatetime | None = Field(None, alias='LastModifiedAt')
-     parameter_schema: str | None = Field(None, alias='ParameterSchema')
-     default_parameters: str | None = Field(None, alias='DefaultParameters')
-     return_schema: str | None = Field(None, alias='ReturnSchema')
-     error_schema: str | None = Field(None, alias='ErrorSchema')
-     input_data_slot_specification: List[DataSpecificationHto] | None = Field(
-         None, alias='InputDataSlotSpecification'
-     )
-     output_data_slot_specification: List[DataSpecificationHto] | None = Field(
-         None, alias='OutputDataSlotSpecification'
-     )
+     deployment_state: DeploymentStates | None = Field(None, alias='DeploymentState')


  class ProcessingStepQueryResultHtoOpenApiProperties(BaseModel):
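
A hedged sketch of the extended filter, which can now exclude deprecated steps and narrow results by deployment state (all values are illustrative):

```python
# Sketch: filter values are illustrative.
filter_param = ProcessingStepFilterParameter(
    ShowDeprecated=False,
    DeploymentState=DeploymentStates.platform,
    IsConfigured=True,
)
```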
@@ -409,6 +402,23 @@ class RenameJobParameters(BaseModel):
      new_name: constr(min_length=1) = Field(..., alias='NewName')


+ class ScalingBehaviours(Enum):
+     aggressive = 'Aggressive'
+     balanced = 'Balanced'
+     conservative = 'Conservative'
+
+
+ class ScalingConfiguration(BaseModel):
+     model_config = ConfigDict(
+         extra='allow',
+         populate_by_name=True,
+     )
+     behaviour: ScalingBehaviours = Field(..., alias='Behaviour')
+     max_replicas: int = Field(
+         ..., alias='MaxReplicas', description='How many instances will run at most'
+     )
+
+
  class SelectProcessingParameters(BaseModel):
      model_config = ConfigDict(
          extra='allow',
@@ -625,7 +635,9 @@ class AdminProcessingStepFilterParameter(BaseModel):
      tags_by_or: List[str] | None = Field(None, alias='TagsByOr')
      is_public: bool | None = Field(None, alias='IsPublic')
      show_hidden: bool | None = Field(None, alias='ShowHidden')
+     show_deprecated: bool | None = Field(None, alias='ShowDeprecated')
      is_configured: bool | None = Field(None, alias='IsConfigured')
+     deployment_state: DeploymentStates | None = Field(None, alias='DeploymentState')


  class AdminProcessingStepQueryResultHtoOpenApi(BaseModel):
@@ -686,6 +698,20 @@ class AdminWorkDataQueryResultHtoOpenApi(BaseModel):
      links: List[Link] | None = None


+ class ConfigureDeploymentParameters(BaseModel):
+     model_config = ConfigDict(
+         extra='allow',
+         populate_by_name=True,
+     )
+     resource_preset: DeploymentResourcePresets = Field(..., alias='ResourcePreset')
+     entrypoint: constr(min_length=1) = Field(
+         ...,
+         alias='Entrypoint',
+         description='Specifies the container entry point, aka ProCon file to use',
+     )
+     scaling: ScalingConfiguration = Field(..., alias='Scaling')
+
+
  class EntryPointHtoOpenApi(BaseModel):
      model_config = ConfigDict(
          extra='allow',
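
Putting the new deployment models together, a configuration payload could look roughly like this (the entrypoint string is a made-up example):

```python
# Sketch: 'procon.py' is a hypothetical entrypoint value.
deployment = ConfigureDeploymentParameters(
    ResourcePreset=DeploymentResourcePresets.medium,
    Entrypoint="procon.py",
    Scaling=ScalingConfiguration(
        Behaviour=ScalingBehaviours.balanced,
        MaxReplicas=4,
    ),
)
```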
@@ -814,17 +840,76 @@ class JobsRootHtoOpenApi(BaseModel):
      links: List[Link] | None = None


- class ProcessingStepHtoOpenApi(BaseModel):
+ class ProcessingStepDeploymentHto(BaseModel):
+     """
+     Displays the deployment configuration
+     """
+
      model_config = ConfigDict(
          extra='allow',
          populate_by_name=True,
      )
-     class_: List[str] | None = Field(None, alias='class')
-     title: str | None = None
-     properties: ProcessingStepHtoOpenApiProperties | None = None
-     entities: List | None = None
-     actions: List[Action] | None = None
-     links: List[Link] | None = None
+     resource_preset: DeploymentResourcePresets | None = Field(
+         None, alias='ResourcePreset'
+     )
+     scaling_behaviour: ScalingBehaviours | None = Field(None, alias='ScalingBehaviour')
+     max_replicas: int | None = Field(None, alias='MaxReplicas')
+     entrypoint: str | None = Field(None, alias='Entrypoint')
+
+
+ class ProcessingStepHtoOpenApiProperties(BaseModel):
+     model_config = ConfigDict(
+         extra='allow',
+         populate_by_name=True,
+     )
+     title: str | None = Field(None, alias='Title')
+     owner_id: str | None = Field(
+         None, alias='OwnerId', description='The owner of this resource'
+     )
+     created_by: str | None = Field(
+         None, alias='CreatedBy', description='The creator of this resource'
+     )
+     version: str | None = Field(
+         None, alias='Version', description='Version of the algorithm. Default = "0"'
+     )
+     function_name: str | None = Field(
+         None,
+         alias='FunctionName',
+         description='Unique name (possibly human-readable) for the function so that it can be identified',
+     )
+     short_description: str | None = Field(
+         None, alias='ShortDescription', description='Human-readable short description'
+     )
+     long_description: str | None = Field(
+         None, alias='LongDescription', description='Human-readable long description'
+     )
+     code_hash: str | None = Field(
+         None,
+         alias='CodeHash',
+         description='The hash of the code executing this function. This is intended to inhibit\nredeployment of different code for the same function version.',
+     )
+     has_parameters: bool | None = Field(None, alias='HasParameters')
+     is_public: bool | None = Field(None, alias='IsPublic')
+     tags: List[str] | None = Field(None, alias='Tags')
+     is_configured: bool | None = Field(None, alias='IsConfigured')
+     hidden: bool | None = Field(None, alias='Hidden')
+     is_deprecated: bool | None = Field(None, alias='IsDeprecated')
+     deprecated_on: AwareDatetime | None = Field(None, alias='DeprecatedOn')
+     reason_for_deprecation: str | None = Field(None, alias='ReasonForDeprecation')
+     created_at: AwareDatetime | None = Field(None, alias='CreatedAt')
+     last_modified_at: AwareDatetime | None = Field(None, alias='LastModifiedAt')
+     parameter_schema: str | None = Field(None, alias='ParameterSchema')
+     default_parameters: str | None = Field(None, alias='DefaultParameters')
+     return_schema: str | None = Field(None, alias='ReturnSchema')
+     error_schema: str | None = Field(None, alias='ErrorSchema')
+     input_data_slot_specification: List[DataSpecificationHto] | None = Field(
+         None, alias='InputDataSlotSpecification'
+     )
+     output_data_slot_specification: List[DataSpecificationHto] | None = Field(
+         None, alias='OutputDataSlotSpecification'
+     )
+     deployment_state: DeploymentStates | None = Field(None, alias='DeploymentState')
+     deployment: ProcessingStepDeploymentHto | None = Field(None, alias='Deployment')


  class ProcessingStepQueryResultHtoOpenApi(BaseModel):
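
Once a processing step HTO has been fetched, the new deployment and deprecation metadata should be readable from its properties. A hedged sketch, assuming `step` is a parsed `ProcessingStepHtoOpenApi` obtained elsewhere:

```python
# Sketch: 'step' is an assumed, already-fetched ProcessingStepHtoOpenApi.
props = step.properties
if props is not None and props.deployment_state == DeploymentStates.platform:
    if props.deployment is not None:
        print(props.deployment.resource_preset,
              props.deployment.scaling_behaviour,
              props.deployment.max_replicas)
if props is not None and props.is_deprecated:
    print(f"deprecated on {props.deprecated_on}: {props.reason_for_deprecation}")
```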
@@ -1057,6 +1142,19 @@ class JobQueryParameters(BaseModel):
      include_remaining_tags: bool | None = Field(None, alias='IncludeRemainingTags')


+ class ProcessingStepHtoOpenApi(BaseModel):
+     model_config = ConfigDict(
+         extra='allow',
+         populate_by_name=True,
+     )
+     class_: List[str] | None = Field(None, alias='class')
+     title: str | None = None
+     properties: ProcessingStepHtoOpenApiProperties | None = None
+     entities: List | None = None
+     actions: List[Action] | None = None
+     links: List[Link] | None = None
+
+
  class ProcessingStepQueryParameters(BaseModel):
      model_config = ConfigDict(
          extra='allow',
@@ -1,5 +1,9 @@
  import json as json_
  import warnings
+ import queue
+ import datetime
+ from datetime import datetime, timedelta
+
  from typing import Any, Self, List

  import httpx
@@ -7,6 +11,7 @@ from httpx import URL
  from pydantic import BaseModel, ConfigDict

  from pinexq_client.core import Link, MediaTypes, ClientException, ApiException
+ from pinexq_client.core.api_event_manager import ApiEventManagerSingleton
  from pinexq_client.core.polling import wait_until, PollingException
  from pinexq_client.job_management.enterjma import enter_jma
  from pinexq_client.job_management.hcos import WorkDataLink, ProcessingStepLink, InputDataSlotHco, OutputDataSlotHco
@@ -92,7 +97,7 @@ class Job:
              .select_processing(processing_step='job_processing')
              .configure_parameters(**job_parameters)
              .start()
-             .wait_for_state(JobStates.completed)
+             .wait_for_completion()
              .delete()
          )
      """
@@ -127,7 +132,7 @@ class Job:
              The newly created job as `Job` object
          """
          job_link = self._jobs_root.create_job_action.execute(
-             CreateJobParameters(name=name)
+             CreateJobParameters(Name=name)
          )
          self._get_by_link(job_link)
          return self
@@ -182,7 +187,7 @@ class Job:
          self._raise_if_no_hco()
          parent_job_url = self.job_hco.self_link.get_url()
          sub_job_link = self._jobs_root.create_subjob_action.execute(
-             CreateSubJobParameters(name=name, parent_job_url=str(parent_job_url))
+             CreateSubJobParameters(Name=name, ParentJobUrl=str(parent_job_url))
          )
          sub_job = Job(self._client)
          sub_job._get_by_link(sub_job_link)
@@ -256,10 +261,10 @@ class Job:
              raise TypeError('Instance passed to "function_name" is not of type "str"')
          # ToDo: provide more parameters to query a processing step
          query_param = ProcessingStepQueryParameters(
-             filter=ProcessingStepFilterParameter(
-                 function_name=function_name,
-                 function_name_match_type=FunctionNameMatchTypes.match_exact,
-                 version=function_version
+             Filter=ProcessingStepFilterParameter(
+                 FunctionName=function_name,
+                 FunctionNameMatchType=FunctionNameMatchTypes.match_exact,
+                 Version=function_version
              )
          )
          query_result = self._processing_step_root.query_action.execute(query_param)
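
This is the pattern throughout the release: generated-model constructors are now called with their PascalCase aliases rather than snake_case field names. Since the models set `populate_by_name=True`, the snake_case form should still validate, but the client code standardizes on the alias form. A sketch ('resample' is an illustrative function name):

```python
# Both spellings should validate thanks to populate_by_name=True.
by_alias = ProcessingStepFilterParameter(FunctionName="resample")
by_field = ProcessingStepFilterParameter(function_name="resample")
assert by_alias == by_field
```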
@@ -271,7 +276,7 @@ class Job:
          processing_url = query_result.processing_steps[0].self_link.get_url()

          self.job_hco.select_processing_action.execute(
-             SelectProcessingParameters(processing_step_url=str(processing_url))
+             SelectProcessingParameters(ProcessingStepUrl=str(processing_url))
          )

          self.refresh()
@@ -322,13 +327,64 @@ class Job:
          result = self.job_hco.result
          return json_.loads(result) if result else None

-     def wait_for_state(self, state: JobStates, timeout_ms: int = 5000, polling_interval_ms: int = 1000) -> Self:
+     def wait_for_state_sse(self, state: JobStates, timeout_s: float | None = None, fallback_polling_interval_s: float = 300) -> Self:
+         self._raise_if_no_hco()
+
+         # early exit if the job is already in the target state or has failed
+         if self.job_hco.state == state:
+             return self
+         if self.job_hco.state == JobStates.error:
+             error_reason = self.job_hco.error_description
+             raise PollingException(f"Job failed. Error: {error_reason}")
+
+         if timeout_s is None:
+             function_timeout_on = datetime.max
+         else:
+             # hard deadline for this wait
+             function_timeout_on = datetime.now() + timedelta(seconds=timeout_s)
+
+         job_changed_signal = queue.Queue()
+         manager = ApiEventManagerSingleton()
+         job_url_str = str(self.job_hco.self_link.get_url())
+         manager.subscribe_waiter(self._client, job_url_str, job_changed_signal)
+
+         try:
+             self.get_state()
+             job_done = self.job_hco.state == state
+             while not job_done:
+                 time_till_function_timeout = function_timeout_on - datetime.now()
+                 if time_till_function_timeout.total_seconds() <= 0.0:
+                     raise PollingException(f"Timeout waiting for Job state. Current state: {self.job_hco.state}")
+
+                 next_wait_timeout_s = min(time_till_function_timeout.total_seconds(), fallback_polling_interval_s)
+
+                 try:
+                     job_changed_signal.get(timeout=next_wait_timeout_s)
+                 except queue.Empty:
+                     # no event before the fallback interval elapsed; poll once and loop again
+                     pass
+
+                 # drain any remaining signals so a burst of events triggers only one poll
+                 while not job_changed_signal.empty():
+                     job_changed_signal.get_nowait()
+
+                 self.get_state()
+                 job_done = self.job_hco.state == state
+                 if self.job_hco.state == JobStates.error:
+                     error_reason = self.job_hco.error_description
+                     raise PollingException(f"Job failed. Error: {error_reason}")
+         finally:
+             manager.unsubscribe_waiter(self._client, job_url_str, job_changed_signal)
+
+         return self
+
+     def wait_for_state(self, state: JobStates, timeout_s: float | None = None, polling_interval_s: float = 1) -> Self:
          """Wait for this job to reach a state. If the job enters the error state, an exception is raised.

          Args:
              state: The state to wait for. After the job enters this state this function returns.
-             timeout_ms: Time span in milliseconds to wait for reaching the state before raising an exception.
-             polling_interval_ms: will determine how fast the API is polled for updates.
+             timeout_s: Time span in seconds to wait for reaching the state before raising an exception.
+             polling_interval_s: will determine how fast the API is polled for updates.
                  Note that low values will produce unnecessary load.

          Returns:
@@ -340,10 +396,10 @@ class Job:
          try:
              wait_until(
                  condition=lambda: self.get_state() == state,
-                 timeout_ms=timeout_ms,
+                 timeout_ms=int(timeout_s * 1000) if timeout_s is not None else None,
                  timeout_message="Waiting for job completion",
                  error_condition=lambda: self.job_hco.state == JobStates.error,
-                 polling_interval_ms=polling_interval_ms
+                 polling_interval_ms=int(polling_interval_s * 1000)
              )
          except TimeoutError as timeout:
              raise TimeoutError(
@@ -358,18 +414,17 @@ class Job:

          return self

-     def wait_for_completion(self, timeout_ms: int = 60000, polling_interval_ms: int = 500) -> Self:
+     def wait_for_completion(self, timeout_s: float | None = None) -> Self:
          """Wait for this job to reach the state 'completed'.

          Args:
-             timeout_ms: Timeout to wait for the job to reach the next state.
-             polling_interval_ms: will determine how fast the API is polled for updates.
+             timeout_s: Timeout to wait for the job to reach the next state.
                  Note that low values will produce unnecessary load.

          Returns:
              This `Job` object
          """
-         return self.wait_for_state(JobStates.completed, timeout_ms, polling_interval_ms)
+         return self.wait_for_state_sse(JobStates.completed, timeout_s)

      def assign_input_dataslot(
          self,
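
`wait_for_completion` thus switches from a 60-second polling default to the SSE-driven wait with no hard timeout unless one is given. A hedged usage sketch, assuming `job` is an already-configured `Job` instance:

```python
# Sketch: event-driven waiting is now the default.
job.wait_for_completion()                # block until 'completed', SSE-driven
job.wait_for_completion(timeout_s=120)   # or give up after two minutes
# The polling variant remains, now parameterized in seconds:
job.wait_for_state(JobStates.completed, timeout_s=120, polling_interval_s=2)
```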
@@ -404,7 +459,7 @@ class Job:
          dataslot = self.job_hco.input_dataslots[index]
          dataslot.select_workdata_action.execute(
              parameters=SelectWorkDataForDataSlotParameters(
-                 work_data_url=str(work_data.get_url())
+                 WorkDataUrl=str(work_data.get_url())
              )
          )
          self.refresh()
@@ -445,7 +500,7 @@ class Job:
          dataslot = self.job_hco.input_dataslots[index]
          dataslot.select_workdata_collection_action.execute(
              parameters=SelectWorkDataCollectionForDataSlotParameters(
-                 work_data_urls=[str(workdata_link.get_url()) for workdata_link in work_datas]
+                 WorkDataUrls=[str(workdata_link.get_url()) for workdata_link in work_datas]
              )
          )
          self.refresh()
@@ -483,15 +538,15 @@ class Job:
      ) -> JobQueryResultHco:
          self._raise_if_no_hco()
          filter_param = JobFilterParameter(
-             is_sub_job=True,
-             parent_job_url=str(self.job_hco.self_link.get_url()),
-             state=state,
-             name=name,
+             IsSubJob=True,
+             ParentJobUrl=str(self.job_hco.self_link.get_url()),
+             State=state,
+             Name=name,
              show_deleted=show_deleted,
-             processing_step_url=processing_step_url,
+             ProcessingStepUrl=processing_step_url,
          )

-         query_param = JobQueryParameters(sort_by=sort_by, filter=filter_param)
+         query_param = JobQueryParameters(SortBy=sort_by, Filter=filter_param)
          job_query_result = self._jobs_root.job_query_action.execute(query_param)
          return job_query_result

@@ -511,32 +566,32 @@ class Job:
          query_result = self._get_sub_jobs(state=state)
          return query_result.total_entities

-     def wait_for_sub_jobs_complete(self, timeout_ms: int = 60000, polling_interval_ms: int = 1000) -> Self:
+     def wait_for_sub_jobs_complete(self, timeout_s: float = 60, polling_interval_s: float = 1) -> Self:
          """Wait for all sub-jobs to reach the state 'completed'.

          This function will block execution until the state is reached or raise an exception
          if the operation timed out or a sub-job returned an error. Only started jobs will be watched.

          Args:
-             timeout_ms: Timeout to wait for the sub-jobs to reach the next state.
+             timeout_s: Timeout to wait for the sub-jobs to reach the next state.

          Returns:
              This `Job` object
-         :param timeout_ms: Wil determine how long to wait for success
-         :param polling_interval_ms: will determine how fast the API is polled for updates.
+         :param timeout_s: Will determine how long to wait for success
+         :param polling_interval_s: will determine how fast the API is polled for updates.
              Note that low values will produce unnecessary load.
          """
          wait_until(
              condition=lambda: self.sub_jobs_in_state(JobStates.pending) == 0,
-             timeout_ms=timeout_ms,
-             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_ms}ms]",
-             polling_interval_ms=polling_interval_ms
+             timeout_ms=int(timeout_s * 1000),
+             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_s}s]",
+             polling_interval_ms=int(polling_interval_s * 1000)
          )
          wait_until(
              condition=lambda: self.sub_jobs_in_state(JobStates.processing) == 0,
-             timeout_ms=timeout_ms,
-             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_ms}ms]",
-             polling_interval_ms=polling_interval_ms
+             timeout_ms=int(timeout_s * 1000),
+             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_s}s]",
+             polling_interval_ms=int(polling_interval_s * 1000)
          )

          error_count = self.sub_jobs_in_state(JobStates.error)
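
At call sites the timeouts move from milliseconds to seconds; a sketch, assuming `parent_job` is a `Job` with started sub-jobs:

```python
# Sketch: timeout_ms=60000 in 0.9.x corresponds to timeout_s=60 here.
parent_job.wait_for_sub_jobs_complete(timeout_s=60, polling_interval_s=1)
```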
@@ -633,7 +688,7 @@ class Job:
                  warnings.warn(f"Could not delete job: {self.self_link().get_url()}\n{e}")

          # finally delete input workdatas
-         if delete_input_workdata is True:
+         if delete_input_workdata:
              for slot in self.job_hco.input_dataslots:
                  for wd in slot.selected_workdatas:
                      try:
@@ -692,7 +747,7 @@ class Job:
          """
          self._raise_if_no_hco()
          self.job_hco.edit_tags_action.execute(
-             SetJobTagsParameters(tags=tags)
+             SetJobTagsParameters(Tags=tags)
          )
          self.refresh()
          return self
@@ -730,14 +785,23 @@ class Job:
              processing_step_instance: ProcessingStep | None = None,
              start: bool = True,
              parameters: str | None = None,
-             allow_output_data_slots: bool | None = None,
+             allow_output_data_deletion: bool | None = None,
              input_data_slots: List[InputDataSlotParameterFlexible] | None = None,
      ) -> Self:
          """
          Creates a new job and configures it rapidly with RapidJobSetupParameters.

          Args:
-             parameters: The parameters to configure the job with.
+             name: Name of the job to be created
+             parent_job_url: URL of the parent job as JobLink. Only one of parent_job_url or parent_job_instance must be provided.
+             parent_job_instance: Parent job as Job instance. Only one of parent_job_url or parent_job_instance must be provided.
+             tags: Tags to assign to the job
+             processing_step_url: URL of the processing step as ProcessingStepLink. Only one of processing_step_url or processing_step_instance must be provided.
+             processing_step_instance: Processing step as ProcessingStep instance. Only one of processing_step_url or processing_step_instance must be provided.
+             start: Flag indicating whether to start the job after creation
+             parameters: Input parameters to the job
+             allow_output_data_deletion: Flag indicating whether to allow output data deletion
+             input_data_slots: List of InputDataSlotParameterFlexible to assign work data to input data slots

          Returns:
              The newly created job as `Job` object
@@ -785,7 +849,7 @@ class Job:
              Tags=tags,
              Start=start,
              Parameters=parameters,
-             AllowOutputDataDeletion=allow_output_data_slots,
+             AllowOutputDataDeletion=allow_output_data_deletion,
              InputDataSlots=input_data_slots
          )
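
The rename fixes a mismatch between the helper's keyword (`allow_output_data_slots`) and the wire field it maps to (`AllowOutputDataDeletion`). A hedged sketch of the payload the helper ultimately builds; the values are illustrative and other fields of `RapidJobSetupParameters` are omitted:

```python
# Sketch: illustrative values only; other fields omitted.
params = RapidJobSetupParameters(
    Name="my-job",
    Tags=["demo"],
    Start=True,
    Parameters='{"threshold": 0.5}',
    AllowOutputDataDeletion=True,
)
```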