pinexq_client-0.2.0.2024.607.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. hypermedia_client/core/__init__.py +8 -0
  2. hypermedia_client/core/base_relations.py +8 -0
  3. hypermedia_client/core/enterapi.py +17 -0
  4. hypermedia_client/core/exceptions.py +2 -0
  5. hypermedia_client/core/hco/__init__.py +0 -0
  6. hypermedia_client/core/hco/action_hco.py +70 -0
  7. hypermedia_client/core/hco/action_with_parameters_hco.py +86 -0
  8. hypermedia_client/core/hco/download_link_hco.py +37 -0
  9. hypermedia_client/core/hco/hco_base.py +91 -0
  10. hypermedia_client/core/hco/link_hco.py +57 -0
  11. hypermedia_client/core/hco/upload_action_hco.py +113 -0
  12. hypermedia_client/core/http_headers.py +9 -0
  13. hypermedia_client/core/media_types.py +24 -0
  14. hypermedia_client/core/model/__init__.py +0 -0
  15. hypermedia_client/core/model/error.py +9 -0
  16. hypermedia_client/core/model/sirenmodels.py +155 -0
  17. hypermedia_client/core/polling.py +37 -0
  18. hypermedia_client/core/sirenaccess.py +173 -0
  19. hypermedia_client/job_management/__init__.py +6 -0
  20. hypermedia_client/job_management/enterjma.py +42 -0
  21. hypermedia_client/job_management/hcos/__init__.py +12 -0
  22. hypermedia_client/job_management/hcos/entrypoint_hco.py +57 -0
  23. hypermedia_client/job_management/hcos/info_hco.py +42 -0
  24. hypermedia_client/job_management/hcos/input_dataslot_hco.py +82 -0
  25. hypermedia_client/job_management/hcos/job_hco.py +174 -0
  26. hypermedia_client/job_management/hcos/job_query_result_hco.py +63 -0
  27. hypermedia_client/job_management/hcos/job_used_tags_hco.py +30 -0
  28. hypermedia_client/job_management/hcos/jobsroot_hco.py +80 -0
  29. hypermedia_client/job_management/hcos/output_dataslot_hco.py +44 -0
  30. hypermedia_client/job_management/hcos/processing_step_hco.py +71 -0
  31. hypermedia_client/job_management/hcos/processing_step_used_tags_hco.py +30 -0
  32. hypermedia_client/job_management/hcos/processingstep_query_result_hco.py +68 -0
  33. hypermedia_client/job_management/hcos/processingsteproot_hco.py +72 -0
  34. hypermedia_client/job_management/hcos/user_hco.py +37 -0
  35. hypermedia_client/job_management/hcos/workdata_hco.py +127 -0
  36. hypermedia_client/job_management/hcos/workdata_query_result_hco.py +67 -0
  37. hypermedia_client/job_management/hcos/workdata_used_tags_query_result_hco.py +30 -0
  38. hypermedia_client/job_management/hcos/workdataroot_hco.py +84 -0
  39. hypermedia_client/job_management/ideas.md +28 -0
  40. hypermedia_client/job_management/known_relations.py +29 -0
  41. hypermedia_client/job_management/model/__init__.py +1 -0
  42. hypermedia_client/job_management/model/open_api_generated.py +890 -0
  43. hypermedia_client/job_management/model/sirenentities.py +112 -0
  44. hypermedia_client/job_management/tool/__init__.py +1 -0
  45. hypermedia_client/job_management/tool/job.py +442 -0
  46. pinexq_client-0.2.0.2024.607.8.dist-info/METADATA +105 -0
  47. pinexq_client-0.2.0.2024.607.8.dist-info/RECORD +49 -0
  48. pinexq_client-0.2.0.2024.607.8.dist-info/WHEEL +4 -0
  49. pinexq_client-0.2.0.2024.607.8.dist-info/licenses/LICENSE +19 -0
hypermedia_client/job_management/model/sirenentities.py
@@ -0,0 +1,112 @@
+ from pydantic import BaseModel, ConfigDict, Field
+
+ from hypermedia_client.core import Entity
+ from hypermedia_client.job_management.model.open_api_generated import (
+     InfoHtoOpenApiProperties,
+     EntryPointHtoOpenApiProperties,
+     JobsRootHtoOpenApiProperties,
+     JobQueryResultHtoOpenApiProperties,
+     JobHtoOpenApiProperties,
+     WorkDataHtoOpenApiProperties,
+     ProcessingStepHtoOpenApiProperties,
+     WorkDataQueryResultHtoOpenApiProperties,
+     WorkDataRootHtoOpenApiProperties,
+     ProcessingStepRootHtoOpenApiProperties,
+     ProcessingStepQueryResultHtoOpenApiProperties,
+     JobUsedTagsHtoOpenApiProperties,
+     ProcessingStepUsedTagsHtoOpenApiProperties,
+     UserHtoOpenApiProperties,
+     WorkDataUsedTagsQueryResultHtoOpenApiProperties,
+ )
+
+
+ # ToDo: make these Generics bound to Entity
+
+
+ class EntryPointEntity(Entity):
+     properties: EntryPointHtoOpenApiProperties | None = None
+
+
+ class InfoEntity(Entity):
+     properties: InfoHtoOpenApiProperties | None = None
+
+
+ class JobsRootEntity(Entity):
+     properties: JobsRootHtoOpenApiProperties | None = None
+
+
+ class JobQueryResultEntity(Entity):
+     properties: JobQueryResultHtoOpenApiProperties | None = None
+
+
+ class JobEntity(Entity):
+     properties: JobHtoOpenApiProperties | None = None
+
+
+ class WorkDataEntity(Entity):
+     properties: WorkDataHtoOpenApiProperties | None = None
+
+
+ class WorkDataRootEntity(Entity):
+     properties: WorkDataRootHtoOpenApiProperties | None = None
+
+
+ class WorkDataQueryResultEntity(Entity):
+     properties: WorkDataQueryResultHtoOpenApiProperties | None = None
+
+
+ class ProcessingStepEntity(Entity):
+     properties: ProcessingStepHtoOpenApiProperties | None = None
+
+
+ class ProcessingStepsRootEntity(Entity):
+     properties: ProcessingStepRootHtoOpenApiProperties | None = None
+
+
+ class ProcessingStepQueryResultEntity(Entity):
+     properties: ProcessingStepQueryResultHtoOpenApiProperties | None = None
+
+
+ class WorkDataUsedTagsQueryResultEntity(Entity):
+     properties: WorkDataUsedTagsQueryResultHtoOpenApiProperties | None = None
+
+
+ class ProcessingStepUsedTagsEntity(Entity):
+     properties: ProcessingStepUsedTagsHtoOpenApiProperties | None = None
+
+
+ class JobUsedTagsEntity(Entity):
+     properties: JobUsedTagsHtoOpenApiProperties | None = None
+
+
+ class InputDataSlotHtoProperties(BaseModel):
+     model_config = ConfigDict(
+         extra="forbid",
+         populate_by_name=True,
+     )
+     is_configured: bool | None = Field(None, alias="IsConfigured")
+     title: str | None = Field(None, alias="Title")
+     description: str | None = Field(None, alias="Description")
+     media_type: str | None = Field(None, alias="MediaType")
+
+
+ class InputDataSlotEntity(Entity):
+     properties: InputDataSlotHtoProperties | None = None
+
+
+ class OutputDataSlotHtoProperties(BaseModel):
+     model_config = ConfigDict(
+         extra="forbid",
+         populate_by_name=True,
+     )
+     title: str | None = Field(None, alias="Title")
+     description: str | None = Field(None, alias="Description")
+     media_type: str | None = Field(None, alias="MediaType")
+
+
+ class OutputDataSlotEntity(Entity):
+     properties: OutputDataSlotHtoProperties | None = None
+
+
+ class UserEntity(Entity):
+     properties: UserHtoOpenApiProperties | None = None
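The two dataslot property models above are plain Pydantic models with PascalCase aliases and `populate_by_name=True`, so they accept either the wire-format alias names or the snake_case attribute names. A minimal sketch of how such a model validates a payload; the field values are made up for illustration:

```python
from hypermedia_client.job_management.model.sirenentities import InputDataSlotHtoProperties

# Wire-format keys use the PascalCase aliases declared on the model.
props = InputDataSlotHtoProperties.model_validate({
    "IsConfigured": True,
    "Title": "input-0",
    "Description": "raw measurement data",
    "MediaType": "application/json",
})
assert props.is_configured is True
assert props.media_type == "application/json"

# Because of populate_by_name=True, constructing by attribute name also works.
props_by_name = InputDataSlotHtoProperties(title="input-0", media_type="application/json")
```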
hypermedia_client/job_management/tool/__init__.py
@@ -0,0 +1 @@
+ from .job import Job
hypermedia_client/job_management/tool/job.py
@@ -0,0 +1,442 @@
+ import json as json_
+ from typing import Any, Self
+
+ import httpx
+ from httpx import URL
+
+ from hypermedia_client.core import Link, MediaTypes
+ from hypermedia_client.core.polling import wait_until, PollingException
+ from hypermedia_client.job_management.enterjma import enter_jma
+ from hypermedia_client.job_management.hcos import WorkDataLink
+ from hypermedia_client.job_management.hcos.entrypoint_hco import EntryPointHco
+ from hypermedia_client.job_management.hcos.job_hco import (
+     JobHco,
+     GenericProcessingConfigureParameters,
+     JobLink,
+ )
+ from hypermedia_client.job_management.hcos.job_query_result_hco import JobQueryResultHco
+ from hypermedia_client.job_management.hcos.jobsroot_hco import JobsRootHco
+ from hypermedia_client.job_management.hcos.processingsteproot_hco import (
+     ProcessingStepsRootHco,
+ )
+ from hypermedia_client.job_management.known_relations import Relations
+ from hypermedia_client.job_management.model import (
+     CreateJobParameters,
+     ProcessingStepQueryParameters,
+     ProcessingStepFilterParameter,
+     SelectProcessingParameters,
+     JobStates,
+     CreateSubJobParameters,
+     JobQueryParameters,
+     JobSortPropertiesSortParameter,
+     JobFilterParameter,
+     SelectWorkDataForDataSlotParameters, SetJobTagsParameters, SelectWorkDataCollectionForDataSlotParameters,
+ )
+
+
+ class Job:
+     """Convenience wrapper for handling JobHcos in the Job-Management API.
+
+     This wrapper allows the API to be used with a fluent-style builder pattern:
+
+         job = (
+             Job(client)
+             .create(name='JobName')
+             .select_processing(processing_step='job_processing')
+             .configure_parameters(**job_parameters)
+             .start()
+             .wait_for_state(JobStates.completed)
+             .delete()
+         )
+     """
+
+     _client: httpx.Client
+     _entrypoint: EntryPointHco
+     _jobs_root: JobsRootHco
+     _job: JobHco | None = None
+     _processing_step_root: ProcessingStepsRootHco
+
+     def __init__(self, client: httpx.Client):
+         """Initializes the wrapper by entering the API and navigating to the jobs and processing-step roots.
+
+         Args:
+             client: An httpx.Client instance initialized with the api-host-url as `base_url`
+         """
+         self._client = client
+         self._entrypoint = enter_jma(client)
+         self._jobs_root = self._entrypoint.job_root_link.navigate()
+         self._processing_step_root = (
+             self._entrypoint.processing_step_root_link.navigate()
+         )
+
+     def create(self, name: str) -> Self:
+         """
+         Creates a new job by name.
+
+         Args:
+             name: Name of the job to be created
+
+         Returns:
+             The newly created job as `Job` object
+         """
+         job_link = self._jobs_root.create_job_action.execute(
+             CreateJobParameters(name=name)
+         )
+         self._get_by_link(job_link)
+         return self
+
+     def _get_by_link(self, job_link: JobLink):
+         self._job = job_link.navigate()
+
+     @classmethod
+     def from_url(cls, client: httpx.Client, job_url: URL) -> Self:
+         """Initializes a `Job` object from an existing job given by its link as URL.
+
+         Args:
+             client: An httpx.Client instance initialized with the api-host-url as `base_url`
+             job_url: URL of the existing job resource
+
+         Returns:
+             The existing job wrapped as a `Job` object
+         """
+         link = Link.from_url(
+             job_url,
+             [str(Relations.CREATED_RESSOURCE)],
+             "Created sub-job",
+             MediaTypes.SIREN,
+         )
+         job_instance = cls(client)
+         job_instance._get_by_link(JobLink.from_link(client, link))
+         return job_instance
+
+     def create_sub_job(self, name: str) -> "Job":
+         """Create a new job by name as a sub-job of the current one.
+
+         Args:
+             name:
+                 Name of the job to be created
+         Returns:
+             The newly created job as `Job` object
+         """
+         parent_job_url = self._job.self_link.get_url()
+         sub_job_link = self._jobs_root.create_subjob_action.execute(
+             CreateSubJobParameters(name=name, parent_job_url=str(parent_job_url))
+         )
+         sub_job = Job(self._client)
+         sub_job._get_by_link(sub_job_link)
+         return sub_job
+
+     def refresh(self) -> Self:
+         """Updates the job from the server.
+
+         Returns:
+             This `Job` object, but with updated properties.
+         """
+         self._job = self._job.self_link.navigate()
+         return self
+
+     def get_state(self) -> JobStates:
+         """Returns the current state of this job from the server.
+
+         Returns:
+             The current state of this `Job` from JobStates
+         """
+         self.refresh()
+         return self._job.state
+
+     def select_processing(self, processing_step: str) -> Self:
+         """Set the processing step for this job given by name. This will query all
+         processing steps with this name from the server and select the unique match.
+
+         Args:
+             processing_step: Name of the processing step as string
+
+         Returns:
+             This `Job` object
+         """
+         # ToDo: provide more parameters to query a processing step
+         query_param = ProcessingStepQueryParameters(
+             filter=ProcessingStepFilterParameter(
+                 function_name_contains=processing_step,
+             )
+         )
+         query_result = self._processing_step_root.query_action.execute(query_param)
+         candidates = [p for p in query_result.processing_steps if p.function_name == processing_step]
+         if len(candidates) == 0:
+             raise AttributeError(f"No processing step with the name '{processing_step}' registered!")
+         if len(candidates) > 1:
+             raise AttributeError(f"Multiple results querying processing step '{processing_step}'!")
+         assert len(candidates) == 1
+         # Todo: For now we choose the first and only result. Make this more flexible?
+         processing_url = candidates[0].self_link.get_url()
+
+         self._job.select_processing_action.execute(
+             SelectProcessingParameters(processing_step_url=str(processing_url))
+         )
+
+         self.refresh()
+
+         return self
+
+     def configure_parameters(self, **parameters: Any) -> Self:
+         """Set the parameters to run the processing step with.
+
+         Args:
+             **parameters: Any keyword parameters provided will be forwarded as parameters
+                 to the processing step function.
+
+         Returns:
+             This `Job` object
+         """
+         self._job.configure_processing_action.execute(
+             GenericProcessingConfigureParameters.model_validate(parameters)
+         )
+
+         self.refresh()
+         return self
+
+     def start(self) -> Self:
+         """Start processing this job.
+
+         Returns:
+             This `Job` object
+         """
+         self._job.start_processing_action.execute()
+         self.refresh()
+         return self
+
+     def get_result(self) -> Any:
+         """Get the return value of the processing step after its completion.
+
+         This value is not defined before completion, so check the state first or
+         wait explicitly for it to complete.
+
+         Returns:
+             The result of the processing step
+         """
+         # TODO: return Sentinel or Exception on 'NotDoneYet'
+         # TODO: handle return value equivalent to asyncio's Future objects
+         self.refresh()
+         result = self._job.result
+         return json_.loads(result) if result else None
+
+     def wait_for_state(self, state: JobStates, timeout_ms: int = 5000) -> Self:
+         """Wait for this job to reach a state.
+
+         Args:
+             state: The state to wait for. After the job enters this state this function returns.
+             timeout_ms: Time span in milliseconds to wait for reaching the state before
+                 raising an exception.
+
+         Returns:
+             This `Job` object
+         """
+         try:
+             wait_until(
+                 condition=lambda: self.get_state() == state,
+                 timeout_ms=timeout_ms,
+                 timeout_message="Waiting for job completion",
+                 error_condition=lambda: self._job.state == JobStates.error,
+             )
+         except TimeoutError as timeout:
+             raise Exception(
+                 f"Job did not reach state: '{state.value}' "
+                 f"current state: '{self.get_state().value}'. Error: {str(timeout)}"
+             )
+         except PollingException:
+             if self._job.state == JobStates.error:
+                 error_reason = self._job.error_description
+                 raise Exception(f"Job failed. Error: {error_reason}")
+             raise Exception("Job failed")
+
+         return self
+
+     def assign_input_dataslot(self, index: int, workdata_link: WorkDataLink) -> Self:
+         """Assign WorkData to an input DataSlot.
+
+         Args:
+             index: The numerical index of the dataslot.
+             workdata_link: The WorkData given by its link
+
+         Returns:
+             This `Job` object
+         """
+         dataslot = self._job.input_dataslots[index]
+         dataslot.select_workdata_action.execute(
+             parameters=SelectWorkDataForDataSlotParameters(
+                 work_data_url=str(workdata_link.get_url())
+             )
+         )
+         self.refresh()
+
+         return self
+
+     def assign_collection_input_dataslot(self, index: int, workdata_links: list[WorkDataLink]) -> Self:
+         """Assign a collection of WorkData to an input DataSlot.
+
+         Args:
+             index: The numerical index of the dataslot.
+             workdata_links: WorkData collection given by their links
+
+         Returns:
+             This `Job` object
+         """
+         dataslot = self._job.input_dataslots[index]
+         dataslot.select_workdata_collection_action.execute(
+             parameters=SelectWorkDataCollectionForDataSlotParameters(
+                 work_data_urls=[str(workdata_link.get_url()) for workdata_link in workdata_links]
+             )
+         )
+         self.refresh()
+
+         return self
+
+     def clear_input_dataslot(self, index: int) -> Self:
+         """Clear the selected WorkData for a dataslot.
+
+         Args:
+             index: The numerical index of the dataslot.
+
+         Returns:
+             This `Job` object
+         """
+         dataslot = self._job.input_dataslots[index]
+
+         # already cleared
+         if not dataslot.clear_workdata_action:
+             return self
+
+         dataslot.clear_workdata_action.execute()
+         self.refresh()
+
+         return self
+
+     def _get_sub_jobs(
+         self,
+         sort_by: JobSortPropertiesSortParameter | None = None,
+         state: JobStates | None = None,
+         name: str | None = None,
+         show_deleted: bool | None = None,
+         processing_step_url: str | None = None,
+     ) -> JobQueryResultHco:
+         filter_param = JobFilterParameter(
+             is_sub_job=True,
+             parent_job_url=str(self._job.self_link.get_url()),
+             state=state,
+             name=name,
+             show_deleted=show_deleted,
+             processing_step_url=processing_step_url,
+         )
+         query_param = JobQueryParameters(sort_by=sort_by, filter=filter_param)
+         job_query_result = self._jobs_root.job_query_action.execute(query_param)
+         return job_query_result
+
+     def get_sub_jobs(self, **tbd):
+         # todo: Query result iterator to go through paginated result
+         raise NotImplementedError
+
+     def sub_jobs_in_state(self, state: JobStates) -> int:
+         """Query how many sub-jobs are in a specific state.
+
+         Args:
+             state: Job state as `JobStates` enum.
+
+         Returns:
+             The number of sub-jobs in the requested state.
+         """
+         query_result = self._get_sub_jobs(state=state)
+         return query_result.total_entities
+
+     def wait_for_sub_jobs_complete(self, timeout_ms: int = 0) -> Self:
+         """Wait for all sub-jobs to reach the state 'completed'.
+
+         This function will block execution until the state is reached or raise an exception
+         if the operation timed out or a sub-job returned an error.
+
+         Args:
+             timeout_ms: Timeout to wait for the sub-jobs to reach the next state.
+
+         Returns:
+             This `Job` object
+         """
+         wait_until(
+             condition=lambda: self.sub_jobs_in_state(JobStates.pending) == 0,
+             timeout_ms=timeout_ms,
+             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_ms}ms]",
+         )
+         wait_until(
+             condition=lambda: self.sub_jobs_in_state(JobStates.processing) == 0,
+             timeout_ms=timeout_ms,
+             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_ms}ms]",
+         )
+         wait_until(
+             condition=lambda: self.sub_jobs_in_state(JobStates.completed) >= 0,
+             error_condition=lambda: self.sub_jobs_in_state(JobStates.error) > 0,
+             error_condition_message="One or more sub-jobs returned an error!",
+             timeout_ms=timeout_ms,
+             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_ms}ms]",
+         )
+         return self
+
+     def hide(self) -> Self:
+         """Mark this job as hidden.
+
+         Returns:
+             This `Job` object
+         """
+         self._job.hide_action.execute()
+         self.refresh()
+         return self
+
+     def unhide(self) -> Self:
+         """Reveal this job again.
+
+         Returns:
+             This `Job` object"""
+         self._job.unhide_action.execute()
+         self.refresh()
+         return self
+
+     def allow_output_data_deletion(self) -> Self:
+         """Mark all output workdata from this job as "deletable".
+
+         Returns:
+             This `Job` object"""
+         self._job.allow_output_data_deletion_action.execute()
+         self.refresh()
+         return self
+
+     def disallow_output_data_deletion(self) -> Self:
+         """Mark all output workdata from this job as "not deletable".
+
+         Returns:
+             This `Job` object"""
+         self._job.disallow_output_data_deletion_action.execute()
+         self.refresh()
+         return self
+
+     def set_tags(self, tags: list[str]) -> Self:
+         """Set tags on the job.
+
+         Returns:
+             This `Job` object"""
+         self._job.edit_tags_action.execute(SetJobTagsParameters(
+             tags=tags
+         ))
+         self.refresh()
+         return self
+
+
+     def get_input_data_slots(self):
+         """Returns the list of InputDataSlotHco objects.
+
+         Returns:
+             A `list[InputDataSlotHco]` object"""
+         return self._job.input_dataslots
+
+     def get_output_data_slots(self):
+         """Returns the list of OutputDataSlotHco objects.
+
+         Returns:
+             A `list[OutputDataSlotHco]` object"""
+         return self._job.output_dataslots
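Taken together, the methods above also cover fan-out via sub-jobs. A minimal usage sketch built only from the methods defined in this file, assuming a reachable JMA host, a valid API key, and a registered processing step named `step_function_name` (all placeholders):

```python
import httpx

from hypermedia_client.job_management.model import JobStates
from hypermedia_client.job_management.tool import Job

# Placeholder host and API key.
client = httpx.Client(
    base_url="https://myapihost.com:80",
    headers={"x-api-key": "<SECRET_PAT>"},
)

parent = Job(client).create(name="parent")
sub = (
    parent.create_sub_job(name="sub-0")
    .select_processing(processing_step="step_function_name")
    .configure_parameters(param_name="value")  # 'param_name' is a placeholder step parameter
    .start()
)
# Block until no sub-job is pending or processing, raising if any returned an error.
parent.wait_for_sub_jobs_complete(timeout_ms=60_000)
result = sub.wait_for_state(JobStates.completed).get_result()
```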
pinexq_client-0.2.0.2024.607.8.dist-info/METADATA
@@ -0,0 +1,105 @@
+ Metadata-Version: 2.1
+ Name: pinexq-client
+ Version: 0.2.0.2024.607.8
+ Summary: A hypermedia-based client for the DataCybernetics PinexQ platform.
+ Author-Email: =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Mathias Reichardt <reichardt@data-cybernetics.com>
+ Maintainer-Email: Mathias Reichardt <reichardt@data-cybernetics.com>, =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Carsten Blank <blank@data-cybernetics.com>
+ License: MIT
+ Requires-Python: >=3.11
+ Requires-Dist: pydantic<3.0.0,>=2.1.0
+ Requires-Dist: httpx<1.0.0,>=0.25.0
+ Description-Content-Type: text/markdown
+
+ # Pine-x-Q Python Client
+
+ A hypermedia-based client for the DataCybernetics PinexQ platform.
+
+ This package contains the following submodules:
+
+ - `core`: A generic *hypermedia client* (HC) to work with Siren hypermedia APIs
+   ([Siren on GitHub](https://github.com/kevinswiber/siren))
+
+ - `job_management`: Specialized HC-object (HCO) implementations for the PinexQ *Job-Management API* (JMA).
+
+ ## Installation
+
+ Install from PyPI by running
+
+ ```pip install pinexq-client```
+
+ ## Setup the Client
+
+ The hypermedia client uses [HTTPX](https://github.com/encode/httpx) as a backend.
+ To access the API, you need to provide a pre-configured HTTPX client with a valid API key for authentication.
+ The user and permissions are derived from the API key, so do not share it and store it securely.
+
+ To initialize the client, two parts must be supplied:
+
+ - The API key (passed as a header)
+ - The API host as a URL, with port if required
+
+ ```python
+ from httpx import Client
+
+ from hypermedia_client.job_management.enterjma import enter_jma
+ from hypermedia_client.job_management.hcos.entrypoint_hco import EntryPointHco
+
+
+ client = Client(
+     base_url="https://myapihost.com:80",
+     headers={'x-api-key': '<SECRET_PAT>'}
+ )
+
+ # the client is now ready to be passed to functions entering the API
+ entrypoint: EntryPointHco = enter_jma(client)
+ ```
+
+
+ ## Using the API
+
+ There is a convenience layer wrapping job-management-specific objects in interface classes for ease of use.
+ In the spirit of a hypermedia API, you can also use low-level calls to navigate the API.
+
+ ### Job-Management Interface
+
+ The main concepts of the job-management API (e.g. jobs) are wrapped in convenience classes:
+
+ ```python
+ from hypermedia_client.job_management.tool import Job
+ from hypermedia_client.job_management.model import JobStates
+ job = (
+     Job(client)
+     .create(name="JobName")
+     .select_processing(processing_step="step_function_name")
+     .configure_parameters(param_name="value")
+     .start()
+     .wait_for_state(JobStates.completed)
+ )
+ result = job.get_result()
+ ```
+
+ ### Raw Hypermedia API
+
+ You can navigate and use the full API with raw hypermedia primitives, e.g. when features are not yet exposed in the convenience wrapper.
+
+ ```python
+ from hypermedia_client.job_management.enterjma import enter_jma
+ from hypermedia_client.job_management.hcos.entrypoint_hco import EntryPointHco
+ from hypermedia_client.job_management.model import CreateJobParameters, SetJobTagsParameters
+
+ entrypoint: EntryPointHco = enter_jma(client)
+ # Navigate to the jobs root
+ job_root = entrypoint.job_root_link.navigate()
+ # Create a new job
+ parameters = CreateJobParameters(name="Test Job")
+ job = job_root.create_job_action.execute(parameters).navigate()
+ # Edit the job's tags
+ job.edit_tags_action.execute(SetJobTagsParameters(tags=["test"]))
+
+ ...
+ ```
+
+
+
+
+
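The raw primitives and the convenience wrapper compose: a job reached through low-level navigation can be handed back to the `Job` wrapper via `Job.from_url`. A minimal sketch, assuming the `client` and the `job` HCO from the raw example above:

```python
from hypermedia_client.job_management.tool import Job

# Wrap the job created via raw primitives in the convenience class.
wrapped = Job.from_url(client, job.self_link.get_url())
wrapped.refresh()
print(wrapped.get_state())
```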