iqm-station-control-client 3.17.0__py3-none-any.whl → 4.0.0__py3-none-any.whl

This diff shows the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
@@ -41,20 +41,21 @@ from iqm.station_control.client.iqm_server.grpc_utils import (
 )
 from iqm.station_control.client.iqm_server.meta_class import IqmServerClientMeta
 from iqm.station_control.client.list_models import DutFieldDataList, DutList
-from iqm.station_control.client.serializers import deserialize_sweep_results, serialize_sweep_task_request
+from iqm.station_control.client.serializers import deserialize_sweep_results, serialize_sweep_job_request
 from iqm.station_control.client.serializers.channel_property_serializer import unpack_channel_properties
 from iqm.station_control.client.serializers.setting_node_serializer import deserialize_setting_node
-from iqm.station_control.client.serializers.task_serializers import deserialize_sweep_task_request
+from iqm.station_control.client.serializers.task_serializers import deserialize_sweep_job_request
 from iqm.station_control.client.station_control import StationControlClient
 from iqm.station_control.interface.models import (
     DutData,
     DutFieldData,
+    JobStatus,
     Statuses,
     SweepData,
     SweepDefinition,
     SweepResults,
-    SweepStatus,
 )
+from iqm.station_control.interface.models.jobs import JobData, JobResult
 from iqm.station_control.interface.models.sweep import SweepBase

 logger = logging.getLogger(__name__)
@@ -112,7 +113,7 @@ class IqmServerClient(StationControlClient, metaclass=IqmServerClientMeta):
             proto.SubmitJobRequestV1(
                 qc_id=self._current_qc.id,
                 type=proto.JobType.PULSE,
-                payload=serialize_sweep_task_request(sweep_definition, queue_name="sweeps"),
+                payload=serialize_sweep_job_request(sweep_definition, queue_name="sweeps"),
                 use_timeslot=self._connection_params.use_timeslot,
             )
         )
@@ -122,8 +123,7 @@ class IqmServerClient(StationControlClient, metaclass=IqmServerClientMeta):
             job_id = from_proto_uuid(job.id)
             self._latest_submitted_sweep = dataclasses.replace(sweep_definition, sweep_id=job_id)
             return {
-                "sweep_id": str(job_id),
-                "task_id": str(job_id),
+                "job_id": str(job_id),
             }

     def get_sweep(self, sweep_id: uuid.UUID) -> SweepData:
@@ -140,7 +140,7 @@ class IqmServerClient(StationControlClient, metaclass=IqmServerClientMeta):
             modified_timestamp=to_datetime(job.updated_at),
             begin_timestamp=to_datetime(job.execution_started_at) if job.HasField("execution_started_at") else None,
             end_timestamp=to_datetime(job.execution_ended_at) if job.HasField("execution_ended_at") else None,
-            sweep_status=to_sweep_status(job.status),
+            job_status=to_job_status(job.status),
             # Sweep definition is a subclass of SweepBase so we can just copy all SweepBase fields
             # from the input sweep to the sweep data
             **{f.name: getattr(sweep, f.name) for f in dataclasses.fields(SweepBase)},
@@ -152,26 +152,28 @@ class IqmServerClient(StationControlClient, metaclass=IqmServerClientMeta):
             data_chunks = jobs.GetJobResultsV1(proto.JobLookupV1(id=to_proto_uuid(sweep_id)))
             return deserialize_sweep_results(load_all(data_chunks))

-    def revoke_sweep(self, sweep_id: uuid.UUID) -> None:
+    def abort_job(self, sweep_id: uuid.UUID) -> None:
         with wrap_error("Job cancellation failed"):
             jobs = proto.JobsStub(self._channel)
             jobs.CancelJobV1(proto.JobLookupV1(id=to_proto_uuid(sweep_id)))

-    def get_task(self, task_id: uuid.UUID) -> dict:
+    def get_job(self, job_id: uuid.UUID) -> JobData:
         with wrap_error("Job loading failed"):
             jobs = proto.JobsStub(self._channel)
-            job: proto.JobV1 = jobs.GetJobV1(proto.JobLookupV1(id=to_proto_uuid(task_id)))
-            return {
-                # It would be nice to have these typed somewhere...
-                "task_id": str(from_proto_uuid(job.id)),
-                "task_status": to_task_status(job.status),
-                "task_result": {"message": ""},
-                "task_error": job.error if job.HasField("error") else "",
-                "position": job.queue_position if job.HasField("queue_position") else None,
-                "is_position_capped": False,
-            }
+            job: proto.JobV1 = jobs.GetJobV1(proto.JobLookupV1(id=to_proto_uuid(job_id)))
+            return JobData(
+                job_id=from_proto_uuid(job.id),
+                job_status=job.status,
+                job_result=JobResult(
+                    job_id=from_proto_uuid(job.id),
+                    parallel_sweep_progress=[],
+                    interrupted=False,
+                ),
+                job_error=job.error if job.HasField("error") else "",
+                position=job.queue_position if job.HasField("queue_position") else None,
+            )

-    def _wait_task_completion(
+    def _wait_job_completion(
         self,
         task_id: str,
         update_progress_callback: Callable[[Statuses], None] | None,
@@ -284,28 +286,28 @@ def parse_calibration_set(cal_set_data: bytes) -> tuple[uuid.UUID, dict[str, Ob


 def payload_to_sweep(job_payload: bytes) -> SweepDefinition:
-    sweep, _ = deserialize_sweep_task_request(job_payload)
+    sweep, _ = deserialize_sweep_job_request(job_payload)
     return sweep


-def to_sweep_status(job_status: proto.JobStatus) -> SweepStatus:
+def to_job_status(job_status: proto.JobStatus) -> JobStatus:
     match job_status:
         case proto.JobStatus.IN_QUEUE:
-            return SweepStatus.PENDING
+            return JobStatus.PENDING_EXECUTION
         case proto.JobStatus.EXECUTING:
-            return SweepStatus.PROGRESS
+            return JobStatus.EXECUTION_START
         case proto.JobStatus.FAILED:
-            return SweepStatus.FAILURE
+            return JobStatus.FAILED
         case proto.JobStatus.COMPLETED:
-            return SweepStatus.SUCCESS
+            return JobStatus.READY
         case proto.JobStatus.INTERRUPTED:
-            return SweepStatus.INTERRUPTED
+            return JobStatus.ABORTED
         case proto.JobStatus.CANCELLED:
-            return SweepStatus.REVOKED
+            return JobStatus.ABORTED
     raise ValueError(f"Unknown job status: '{job_status}'")


-def to_task_status(job_status: proto.JobStatus) -> str:
+def to_string_job_status(job_status: proto.JobStatus) -> str:
     match job_status:
         case proto.JobStatus.IN_QUEUE:
             return "PENDING"
@@ -30,6 +30,6 @@ from iqm.station_control.client.serializers.sweep_serializers import (
     serialize_sweep_results,
 )
 from iqm.station_control.client.serializers.task_serializers import (
-    serialize_run_task_request,
-    serialize_sweep_task_request,
+    serialize_run_job_request,
+    serialize_sweep_job_request,
 )
@@ -27,7 +27,8 @@ from exa.common.data.setting_node import SettingNode
 from exa.common.sweep.database_serialization import decode_and_validate_sweeps, encode_nd_sweeps
 from iqm.station_control.client.serializers.datetime_serializers import deserialize_datetime, serialize_datetime
 from iqm.station_control.client.serializers.playlist_serializers import pack_playlist, unpack_playlist
-from iqm.station_control.interface.models import SweepData, SweepDefinition, SweepResults, SweepStatus
+from iqm.station_control.interface.models import SweepData, SweepDefinition, SweepResults
+from iqm.station_control.interface.models.jobs import JobStatus


 def serialize_sweep_definition(sweep_definition: SweepDefinition) -> SweepDefinitionProto:
@@ -74,7 +75,7 @@ def serialize_sweep_data(sweep_data: SweepData) -> dict:
         "modified_timestamp": serialize_datetime(sweep_data.modified_timestamp),
         "begin_timestamp": serialize_datetime(sweep_data.begin_timestamp),
         "end_timestamp": serialize_datetime(sweep_data.end_timestamp),
-        "sweep_status": sweep_data.sweep_status.value,
+        "job_status": sweep_data.job_status.value,
     }

@@ -90,7 +91,7 @@ def deserialize_sweep_data(data: dict) -> SweepData:
         modified_timestamp=deserialize_datetime(data["modified_timestamp"]),
         begin_timestamp=deserialize_datetime(data["begin_timestamp"]),
         end_timestamp=deserialize_datetime(data["end_timestamp"]),
-        sweep_status=SweepStatus(data["sweep_status"]),
+        job_status=JobStatus(data["job_status"]),
     )

@@ -29,7 +29,7 @@ from iqm.station_control.client.serializers.sweep_serializers import (
 from iqm.station_control.interface.models import RunDefinition, SweepDefinition


-def serialize_run_task_request(run_definition: RunDefinition, queue_name: str) -> bytes:
+def serialize_run_job_request(run_definition: RunDefinition, queue_name: str) -> bytes:
     """Wrap `run_definition` and `queue_name` into a protobuf message and serialize into a bitstring.

     Args:
@@ -42,10 +42,10 @@ def serialize_run_task_request(run_definition: RunDefinition, queue_name: str) -

     """
     payload = serialize_run_definition(run_definition)
-    return _serialize_task_request(payload, queue_name, run_definition.sweep_definition.sweep_id)
+    return _serialize_job_request(payload, queue_name, run_definition.sweep_definition.sweep_id)


-def serialize_sweep_task_request(sweep_definition: SweepDefinition, queue_name: str) -> bytes:
+def serialize_sweep_job_request(sweep_definition: SweepDefinition, queue_name: str) -> bytes:
     """Wrap `sweep_definition` and `queue_name` into a protobuf message and serialize into a bitstring.

     Args:
@@ -58,10 +58,16 @@ def serialize_sweep_task_request(sweep_definition: SweepDefinition, queue_name:

     """
     payload = serialize_sweep_definition(sweep_definition)
-    return _serialize_task_request(payload, queue_name, sweep_definition.sweep_id)
+    return _serialize_job_request(payload, queue_name, sweep_definition.sweep_id)


-def deserialize_sweep_task_request(data: bytes) -> tuple[SweepDefinition, str]:
+def _serialize_job_request(payload: Any, queue_name: str, sweep_id: uuid.UUID) -> bytes:
+    sweep_job_request_proto = SweepTaskRequestProto(queue_name=queue_name, sweep_id=str(sweep_id))
+    sweep_job_request_proto.payload.Pack(payload, type_url_prefix="iqm-data-definitions")
+    return sweep_job_request_proto.SerializeToString()
+
+
+def deserialize_sweep_job_request(data: bytes) -> tuple[SweepDefinition, str]:
     """Deserializes `sweep_definition` and `queue_name` from the serialized bitstring.

     Args:
@@ -81,9 +87,3 @@ def deserialize_sweep_task_request(data: bytes) -> tuple[SweepDefinition, str]:
     sweep_definition = deserialize_sweep_definition(sweep_definition_proto)
     queue_name = sweep_task_request_proto.queue_name
     return sweep_definition, queue_name
-
-
-def _serialize_task_request(payload: Any, queue_name: str, sweep_id: uuid.UUID) -> bytes:
-    sweep_task_request_proto = SweepTaskRequestProto(queue_name=queue_name, sweep_id=str(sweep_id))
-    sweep_task_request_proto.payload.Pack(payload, type_url_prefix="iqm-data-definitions")
-    return sweep_task_request_proto.SerializeToString()
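The renamed helpers keep serialization symmetric: what `serialize_sweep_job_request` packs, `deserialize_sweep_job_request` unpacks. A minimal round-trip sketch, assuming a `SweepDefinition` instance named `sweep_definition` is already constructed (everything else comes from the imports shown in this diff):

```python
from iqm.station_control.client.serializers import serialize_sweep_job_request
from iqm.station_control.client.serializers.task_serializers import deserialize_sweep_job_request

# Pack the sweep definition and queue name into a single protobuf bitstring.
payload = serialize_sweep_job_request(sweep_definition, queue_name="sweeps")

# Unpack them again; both halves of the pair survive the round trip.
recovered_sweep, queue_name = deserialize_sweep_job_request(payload)
assert queue_name == "sweeps"
assert recovered_sweep.sweep_id == sweep_definition.sweep_id
```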
@@ -55,8 +55,8 @@ from iqm.station_control.client.list_models import (
 from iqm.station_control.client.serializers import (
     deserialize_run_data,
     deserialize_sweep_results,
-    serialize_run_task_request,
-    serialize_sweep_task_request,
+    serialize_run_job_request,
+    serialize_sweep_job_request,
 )
 from iqm.station_control.client.serializers.channel_property_serializer import unpack_channel_properties
 from iqm.station_control.client.serializers.setting_node_serializer import deserialize_setting_node
@@ -87,6 +87,7 @@ from iqm.station_control.interface.models import (
     SweepDefinition,
     SweepResults,
 )
+from iqm.station_control.interface.models.jobs import JobData, JobStatus
 from iqm.station_control.interface.pydantic_base import PydanticBase

 logger = logging.getLogger(__name__)
@@ -277,14 +278,14 @@ class StationControlClient:
             sweep_definition: The content of the sweep to be created.

         Returns:
-            Dict containing the task ID and sweep ID, and corresponding hrefs, of a successful sweep execution
-            in monolithic mode or successful submission to the task queue in remote mode.
+            Dict containing the job ID and sweep ID, and corresponding hrefs, of a successful sweep execution
+            in monolithic mode or successful submission to the job queue in remote mode.

         Raises:
             ExaError if submitting a sweep failed.

         """
-        data = serialize_sweep_task_request(sweep_definition, queue_name="sweeps")
+        data = serialize_sweep_job_request(sweep_definition, queue_name="sweeps")
         headers = {"Content-Type": "application/octet-stream"}
         return self._send_request(requests.post, "sweeps", data=data, headers=headers).json()

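For callers of this method the rename is a breaking change: the response dict no longer carries a `task_id` key. A hedged migration sketch, where `submit_response` stands for the dict returned by the sweep-creation call above (the exact set of keys depends on the server mode):

```python
# 3.17.0 and earlier: job_id = submit_response["task_id"]
job_id = submit_response["job_id"]  # 4.0.0: a single job identifier key
```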
@@ -293,14 +294,13 @@ class StationControlClient:
         response = self._send_request(requests.get, f"sweeps/{sweep_id}")
         return deserialize_sweep_data(response.json())

-    def revoke_sweep(self, sweep_id: uuid.UUID) -> None:
-        """Either remove a sweep task from the queue, or abort it gracefully if it's already executing.
+    def abort_job(self, job_id: uuid.UUID) -> None:
+        """Either remove a job from the queue, or abort it gracefully if it's already executing.

-        If the task was already executing when revoked, the status of the task will be set to ``"INTERRUPTED"``.
-        If the task had not started yet, the status will be set to ``"REVOKED"``.
-        If the task is not found or is already finished nothing happens.
+        The status of the job will be set to ``JobStatus.ABORTED``.
+        If the job is not found or is already finished nothing happens.
         """
-        self._send_request(requests.post, f"sweeps/{sweep_id}/revoke")
+        self._send_request(requests.post, f"jobs/{job_id}/abort")

     def delete_sweep(self, sweep_id: uuid.UUID) -> None:
         """Delete sweep in the database."""
@@ -315,15 +315,15 @@ class StationControlClient:
         self,
         run_definition: RunDefinition,
         update_progress_callback: Callable[[Statuses], None] | None = None,
-        wait_task_completion: bool = True,
+        wait_job_completion: bool = True,
     ) -> bool:
         """Execute an N-dimensional sweep of selected variables and save run, sweep and results."""
-        data = serialize_run_task_request(run_definition, queue_name="sweeps")
+        data = serialize_run_job_request(run_definition, queue_name="sweeps")

         headers = {"Content-Type": "application/octet-stream"}
         response = self._send_request(requests.post, "runs", data=data, headers=headers)
-        if wait_task_completion:
-            return self._wait_task_completion(response.json()["task_id"], update_progress_callback)
+        if wait_job_completion:
+            return self._wait_job_completion(response.json()["job_id"], update_progress_callback)
         return False

     def get_run(self, run_id: uuid.UUID) -> RunData:
@@ -675,70 +675,62 @@ class StationControlClient:
         response = self._send_request(requests.get, f"sequence-results/{sequence_id}")
         return SequenceResultData.model_validate(response.json())

-    def get_task(self, task_id: uuid.UUID) -> dict:
-        """Get task data."""
-        response = self._send_request(requests.get, f"tasks/{task_id}")
+    def get_job(self, job_id: uuid.UUID) -> JobData:
+        """Get job data."""
+        response = self._send_request(requests.get, f"jobs/{job_id}")
         return response.json()

-    def _wait_task_completion(self, task_id: str, update_progress_callback: Callable[[Statuses], None] | None) -> bool:
-        logger.info("Celery task ID: %s", task_id)
+    def _wait_job_completion(self, job_id: str, update_progress_callback: Callable[[Statuses], None] | None) -> bool:
+        logger.info("Waiting for job ID: %s", job_id)
         update_progress_callback = update_progress_callback or (lambda status: None)
         try:
-            task_status = self._poll_task_status_while_in_pending(task_id, update_progress_callback)
-            if task_status == "PROGRESS":
-                self._poll_task_status_while_in_progress(task_id, update_progress_callback)
+            job_status = self._poll_job_status_until_execution_start(job_id, update_progress_callback)
+            if JobStatus(job_status) not in JobStatus.terminal_statuses():
+                self._poll_job_status_until_terminal(job_id, update_progress_callback)
         except KeyboardInterrupt as exc:
-            logger.info("Caught %s, revoking task %s", exc, task_id)
-            self._send_request(requests.post, f"tasks/{task_id}/revoke")
+            logger.info("Caught %s, revoking job %s", exc, job_id)
+            self.abort_job(uuid.UUID(job_id))
             return True
         return False

-    def _poll_task_status_while_in_pending(
-        self, task_id: str, update_progress_callback: Callable[[Statuses], None]
-    ) -> str:
-        # Keep polling task status as long as it's PENDING, and update progress with `update_progress_callback`.
+    def _poll_job_status_until_execution_start(
+        self, job_id: str, update_progress_callback: Callable[[Statuses], None]
+    ) -> JobStatus:
+        # Keep polling job status as long as it's PENDING, and update progress with `update_progress_callback`.
         max_seen_position = 0
-        was_warned = False
         while True:
-            task = self._poll_task(task_id)
-            if task["task_status"] != "PENDING":
+            job = self._poll_job(job_id)
+            if job["job_status"] >= JobStatus.EXECUTION_START:
                 if max_seen_position:
                     update_progress_callback([("Progress in queue", max_seen_position, max_seen_position)])
-                return task["task_status"]
-            position = task["position"]
+                return job["job_status"]
+            position = job["position"]
+
             if position == 0:
                 sleep(1)
                 continue
             max_seen_position = max(max_seen_position, position)
-            if task["is_position_capped"] and not was_warned:
-                logger.info(
-                    "Over %s tasks before this in queue. Progress will be shown when in top %s.",
-                    max_seen_position,
-                    max_seen_position,
-                )
-                was_warned = True
-            else:
-                update_progress_callback([("Progress in queue", max_seen_position - position, max_seen_position)])
+            update_progress_callback([("Progress in queue", max_seen_position - position, max_seen_position)])
             sleep(1)

-    def _poll_task_status_while_in_progress(
+    def _poll_job_status_until_terminal(
         self,
-        task_id: str,
+        job_id: str,
         update_progress_callback: Callable[[Statuses], None],
     ) -> None:
-        # Keep polling task status as long as it's PROGRESS, and update progress with `update_progress_callback`.
+        # Keep polling job status until it finishes, and update progress with `update_progress_callback`.
         while True:
-            task = self._poll_task(task_id)
-            update_progress_callback(task["task_result"].get("parallel_sweep_progress", []))
-            if task["task_status"] != "PROGRESS":
+            job = self._poll_job(job_id)
+            update_progress_callback(job["job_result"].get("parallel_sweep_progress", []))
+            if JobStatus(job["job_status"]) in JobStatus.terminal_statuses():
                 return
             sleep(1)

-    def _poll_task(self, task_id: str) -> dict:
-        task = self._send_request(requests.get, f"tasks/{task_id}").json()
-        if task["task_status"] == "FAILURE":
-            raise InternalServerError(f"Task: {task.get('task_id')}\n{task.get('task_error')}")
-        return task
+    def _poll_job(self, job_id: str) -> JobData:
+        job = self._send_request(requests.get, f"jobs/{job_id}").json()
+        if job["job_status"] == JobStatus.FAILED:
+            raise InternalServerError(f"Job: {job.get('job_id')}\n{job.get('job_error')}")
+        return job

     def _send_request(
         self, http_method: Callable[..., requests.Response], url_path: str, **kwargs
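The private pollers above no longer compare status strings; they lean on the `JobStatus` ordering and `terminal_statuses()`. The same pattern works against the public `get_job` endpoint. A sketch under the assumption that `client` and `job_id` are as in the earlier examples and that the endpoint returns the JSON dict seen in `_poll_job`:

```python
from time import sleep

from iqm.station_control.interface.models.jobs import JobStatus

job = client.get_job(job_id)
while JobStatus(job["job_status"]) not in JobStatus.terminal_statuses():
    sleep(1)  # the client itself polls on the same 1 s cadence
    job = client.get_job(job_id)
# Terminal here means READY, FAILED, or ABORTED.
```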
@@ -14,6 +14,9 @@
 """Station control interface models."""

 from iqm.station_control.interface.models.dut import DutData, DutFieldData
+from iqm.station_control.interface.models.jobs import (
+    JobStatus,
+)
 from iqm.station_control.interface.models.observation import (
     ObservationData,
     ObservationDefinition,
@@ -32,7 +35,7 @@ from iqm.station_control.interface.models.sequence import (
     SequenceResultData,
     SequenceResultDefinition,
 )
-from iqm.station_control.interface.models.sweep import SweepData, SweepDefinition, SweepStatus
+from iqm.station_control.interface.models.sweep import SweepData, SweepDefinition
 from iqm.station_control.interface.models.type_aliases import (
     DutType,
     GetObservationsMode,
@@ -0,0 +1,125 @@
+# Copyright 2025 IQM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Job executor artifact and state models."""
+
+from __future__ import annotations
+
+from datetime import datetime
+from enum import Enum
+import functools
+from uuid import UUID
+
+from iqm.station_control.interface.pydantic_base import PydanticBase
+
+
+class TimelineEntry(PydanticBase):
+    """Status and timestamp pair as described in a job timeline."""
+
+    status: JobStatus
+    timestamp: datetime
+
+
+class JobResult(PydanticBase):
+    """Progress information about a running job."""
+
+    job_id: UUID
+    parallel_sweep_progress: list[tuple[str, int, int]]
+    interrupted: bool
+
+
+class JobData(PydanticBase):
+    """Job response data model"""
+
+    job_id: UUID
+    job_status: JobStatus
+    job_result: JobResult
+    job_error: str | None
+    position: int | None
+
+
+@functools.total_ordering
+class JobStatus(Enum):
+    """Enumeration of different states a job can be in. The ordering of these statuses is important,
+    and execution logic relies on it. Thus if a new status is added, ensure that it is slotted
+    in at the appropriate place. See the __lt__ implementation for further details.
+    """
+
+    # Received by the server
+    RECEIVED = "received"
+
+    # Validating the job
+    VALIDATING = "validating"
+    ACCEPTED = "accepted"
+
+    PREPARING_CALIBRATION = "preparing_calibration"
+
+    # Running PulLA
+    PENDING_COMPILATION = "pending_compilation"
+    COMPILING = "compiling"
+    COMPILED = "compiled"
+
+    # Executing sweep
+    STORING_SWEEP_METADATA = "storing_sweep_metadata"
+    METADATA_STORED = "metadata_stored"
+    PENDING_EXECUTION = "pending_execution"
+    EXECUTION_START = "execution_start"
+    EXECUTION_END = "execution_end"
+    PENDING_POST_PROCESSING = "pending_post_processing"
+    POST_PROCESSING = "post_processing"
+    READY = "ready"
+
+    # Job failed, can happen at any stage
+    FAILED = "failed"
+
+    # Job aborted
+    ABORTED = "aborted"
+
+    def __str__(self):
+        return self.name.lower()
+
+    def __hash__(self):
+        return hash(self.name)
+
+    def __eq__(self, other):
+        if isinstance(other, str):
+            try:
+                other = JobStatus(other.lower())
+            except ValueError:
+                return False
+        elif not isinstance(other, JobStatus):
+            return NotImplemented
+        return self.name == other.name
+
+    def __lt__(self, other):
+        """Enable comparison according to definition order.
+
+        Examples:
+            >>> JobStatus.ACCEPTED < JobStatus.COMPILED
+            True
+
+        """
+        if isinstance(other, str):
+            try:
+                other = JobStatus(other.lower())
+            except ValueError:
+                return NotImplemented
+        elif not isinstance(other, JobStatus):
+            return NotImplemented
+        members = list(JobStatus.__members__.values())
+        return members.index(self) < members.index(other)
+
+    @classmethod
+    def terminal_statuses(cls) -> set[JobStatus]:
+        """Statuses from which the execution can't continue."""
+        return {cls.ABORTED, cls.FAILED, cls.READY}
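Because the polling logic shown earlier relies on this enum's definition order and its string-tolerant comparisons, here is a short sketch of the observable behaviour, using only the class above:

```python
from iqm.station_control.interface.models.jobs import JobStatus

# Definition order doubles as pipeline order.
assert JobStatus.ACCEPTED < JobStatus.COMPILED < JobStatus.READY

# functools.total_ordering derives >=, >, and <= from __lt__ and __eq__,
# and both accept raw status strings.
assert JobStatus.EXECUTION_START >= "pending_execution"
assert JobStatus.FAILED == "failed"

# Terminal statuses are the ones that stop polling.
assert JobStatus("ready") in JobStatus.terminal_statuses()
```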
@@ -0,0 +1,45 @@
+# Copyright 2024 IQM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""job executor artifact and state models."""
+
+from __future__ import annotations
+
+from datetime import datetime
+from uuid import UUID
+
+from iqm.station_control.interface.models import JobStatus
+from iqm.station_control.interface.pydantic_base import PydanticBase
+
+
+class JobStateTimestamp(PydanticBase):
+    """Represents a single timestamped state for a job."""
+
+    job_id: UUID
+    status: JobStatus
+    timestamp: datetime
+
+
+class JobsInQueue(PydanticBase):
+    """List of jobs in a particular queue, corresponding to some job state."""
+
+    jobs: list[JobStateTimestamp] = []
+    job_count: int = 0
+
+
+class QueueState(PydanticBase):
+    """Describes the state of a single job queue."""
+
+    queue: JobStatus
+    jobs_in_queue: int
+    completed_jobs: int
@@ -15,13 +15,13 @@

 from dataclasses import dataclass
 from datetime import datetime
-from enum import Enum
 import uuid

 from iqm.models.playlist import Playlist

 from exa.common.data.setting_node import SettingNode
 from exa.common.sweep.util import NdSweep
+from iqm.station_control.interface.models.jobs import JobStatus


 @dataclass(kw_only=True)
@@ -50,23 +50,6 @@ class SweepDefinition(SweepBase):
     """A :class:`~iqm.models.playlist.Playlist` that should be uploaded to the controllers."""


-class SweepStatus(Enum):
-    """Status for sweeps."""
-
-    PENDING = "PENDING"
-    """Sweep waiting for execution or status unknown"""
-    PROGRESS = "PROGRESS"
-    """Sweep is currently being executed"""
-    SUCCESS = "SUCCESS"
-    """Sweep has succeeded, ready for result retrieval"""
-    FAILURE = "FAILURE"
-    """Sweep has failed"""
-    REVOKED = "REVOKED"
-    """Sweep was revoked from execution"""
-    INTERRUPTED = "INTERRUPTED"
-    """Sweep was interrupted during its execution"""
-
-
 @dataclass(kw_only=True)
 class SweepData(SweepBase):
     """The content of the sweep stored in the database."""
@@ -79,5 +62,5 @@ class SweepData(SweepBase):
     """Time when the sweep began in the station control."""
     end_timestamp: datetime | None
    """Time when the sweep ended in the station control."""
-    sweep_status: SweepStatus
+    job_status: JobStatus
     """Status of sweep execution."""
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: iqm-station-control-client
-Version: 3.17.0
+Version: 4.0.0
 Summary: Python client for communicating with Station Control Service
 Author-email: IQM Finland Oy <info@meetiqm.com>
 License: Apache License
@@ -1,11 +1,11 @@
 iqm/station_control/client/__init__.py,sha256=BmBIBdZa10r-IWCFzZ1-0DG6GQKPIXqGXltfXop4ZeQ,942
 iqm/station_control/client/list_models.py,sha256=SjD0DbCrM9z1SSuGoQS83lyJmDLuMOatpJUoW8itW9s,2335
-iqm/station_control/client/station_control.py,sha256=XdYBblHtMJ6H81PC5R9Orazo6gfVS577tfA7e4D-x7I,37900
+iqm/station_control/client/station_control.py,sha256=XMLN3bNklF40N-6N1M-lF4vfG8IouX2qE1ZI_ujH3ck,37474
 iqm/station_control/client/utils.py,sha256=cpS3hXEeeIXeqd_vBnnwo3JHS83FrNpG07SiTUwUx-I,1650
 iqm/station_control/client/iqm_server/__init__.py,sha256=nLsRHN1rnOKXwuzaq_liUpAYV3sis5jkyHccSdacV7U,624
 iqm/station_control/client/iqm_server/error.py,sha256=ZLV2-gxFLHZjZVkI3L5sWcBMiay7NT-ijIEvrXgVJT8,1166
 iqm/station_control/client/iqm_server/grpc_utils.py,sha256=ee42C31_JIXlP6ikZQbohgUQjqCvcSSxIW_9lm9MMV8,5727
-iqm/station_control/client/iqm_server/iqm_server_client.py,sha256=24qQQONpw_9tzoYYtDkYLZLRAxLjk3xI5JAqK6DUCWk,14589
+iqm/station_control/client/iqm_server/iqm_server_client.py,sha256=ZSon3fHpgfAF8M3tymSu9LfVG-LAxDY_ddQhbrR4uAU,14612
 iqm/station_control/client/iqm_server/meta_class.py,sha256=pePJ0Xy0aiJg-bZWK8D87gblq6imfXLsZHjpZkf5D9s,1399
 iqm/station_control/client/iqm_server/proto/__init__.py,sha256=mOJQ_H-NEyJMffRaDSSZeXrScHaHaHEXULv-O_OJA3A,1345
 iqm/station_control/client/iqm_server/proto/calibration_pb2.py,sha256=gum0DGmqxhbfaar8SqahmSif1pB6hgo0pVcnoi3VMUo,3017
@@ -25,28 +25,30 @@ iqm/station_control/client/iqm_server/proto/uuid_pb2.pyi,sha256=9LXcqNoQS1iapCso
 iqm/station_control/client/iqm_server/proto/uuid_pb2_grpc.py,sha256=SF40l84__r-OGGNYBru5ik9gih-XqeTq2iwM5gMN5Qc,726
 iqm/station_control/client/iqm_server/testing/__init__.py,sha256=wCNfJHIR_bqG3ZBlgm55v90Rih7VCpfctoIMfwRMgjk,567
 iqm/station_control/client/iqm_server/testing/iqm_server_mock.py,sha256=X_Chi8TKx95PiuhFfGnRu9LxeIpnKKynW_8tXwxFQD8,3340
-iqm/station_control/client/serializers/__init__.py,sha256=8os3EGOtNTRFaviZdGwDyMt9GUpM3ZP7icPKAxOg1qg,1438
+iqm/station_control/client/serializers/__init__.py,sha256=Hui5SrT4NVeV-27g116ZJgvRQo8ednLMsGpu4V39g1k,1436
 iqm/station_control/client/serializers/channel_property_serializer.py,sha256=ChlX8B-blM5hjv3pUExHOd-vE3O_myPwILu36KZYYNU,7121
 iqm/station_control/client/serializers/datetime_serializers.py,sha256=Ke6VRHa_359xYxXTegs8iweoDfuGeBDgkoOtGBbyC1Q,1122
 iqm/station_control/client/serializers/playlist_serializers.py,sha256=S8RuKdqeJxqUf7_kqTDXIKnuo6g-WpzGY7cesSQa3Rw,18086
 iqm/station_control/client/serializers/run_serializers.py,sha256=4zH0I5EvvaP7wgLMprXXWa36nAPO4Lv0fPkCrDC_v-g,6698
 iqm/station_control/client/serializers/setting_node_serializer.py,sha256=m4Sbm8Qr3GiSNiE-Jh8gFEgfscfN1xxELb0vCa9cK70,1197
 iqm/station_control/client/serializers/struct_serializer.py,sha256=QztBsbRlRG_UrtpQLE3bi0WKEVn48kVB91H1g26PvqQ,3270
-iqm/station_control/client/serializers/sweep_serializers.py,sha256=2COfE3ewusjeIp7q3sQWEpc9nzUrfjMNTeemihtpzKA,5725
-iqm/station_control/client/serializers/task_serializers.py,sha256=m0H2JTxak_eAyl88Ptj7EmJhcAIW5ozPJEVYfZYBIu8,3709
+iqm/station_control/client/serializers/sweep_serializers.py,sha256=pfdr36L6whk6M5koUsQsL61R1WUWiLOgQUHvwIVBUyY,5766
+iqm/station_control/client/serializers/task_serializers.py,sha256=KTd3zdfqr9kSIiEbwsoSNiSu_hxzFfDOlLuttoyBfps,3700
 iqm/station_control/interface/__init__.py,sha256=MIQla-cBKPbZqBkp-LNyPfjiV0gzf-IFEwrMMhsnKlg,785
 iqm/station_control/interface/list_with_meta.py,sha256=GAXIDEXKeua6-2FoQ_O1tkhx-d8pBMGHaIkdvgg-cag,1185
 iqm/station_control/interface/pydantic_base.py,sha256=MVzcsH7wG1DON-qTw6KLpUDay7_b_9CDQgymVzg9HwI,1303
-iqm/station_control/interface/models/__init__.py,sha256=PbENb1YhX2OLGTiJNvj70e5Acj7f8jMP3hp5ncH5U-0,1527
+iqm/station_control/interface/models/__init__.py,sha256=JeXFg-ltPOoSY6qVclUKCVyt-7okNgCptLb3h3t3JD4,1587
 iqm/station_control/interface/models/dut.py,sha256=dd1SpcsBe4P057jvcPqv39SjzekewwP07hThFe5ulNA,1216
+iqm/station_control/interface/models/jobs.py,sha256=nQNyMauQu99LTUK38meVWXSMdX3D-Fgn8VsYDMTwYN8,3611
+iqm/station_control/interface/models/monitor.py,sha256=9p-hg5DWrlbCIrJ0GyQ90DrHr1Yc9DCQyfpsQCm0cDE,1328
 iqm/station_control/interface/models/observation.py,sha256=Jce4lIsUtHRIFT3nr-cbKvh3dbR2Y_yM5x0yyvUdjF8,3261
 iqm/station_control/interface/models/observation_set.py,sha256=Ko2o3-9I38NfjNF2IQPcwfbwpkTQ3PIU7fUiSaDleX8,3031
 iqm/station_control/interface/models/run.py,sha256=m-iE3QMPQUOF7bsw8JCAM1Bd6bDVhAgxrtc_AC7rCkc,4097
 iqm/station_control/interface/models/sequence.py,sha256=uOqMwF1x-vW6UHs2WnPD3PsuSgV3a8OTAsgn_4UENLw,2723
-iqm/station_control/interface/models/sweep.py,sha256=6SQ4Ty4_Rm1KTeR7YfrLmwyD-AnNE495LMxYu8dM4Ko,2947
+iqm/station_control/interface/models/sweep.py,sha256=ZV_1zcEF5_NY0nPgC75tU7s14TE1o0croBSClIVSmCE,2493
 iqm/station_control/interface/models/type_aliases.py,sha256=3LB9viZVi8osavY5kKF8TH1crayG7-MLjgBqXDCqL2s,1018
-iqm_station_control_client-3.17.0.dist-info/LICENSE.txt,sha256=R6Q7eUrLyoCQgWYorQ8WJmVmWKYU3dxA3jYUp0wwQAw,11332
-iqm_station_control_client-3.17.0.dist-info/METADATA,sha256=f52bS2Dy-hSiPXcZW9mHHfeRagoLY7R9GdoE6JJQDVM,14010
-iqm_station_control_client-3.17.0.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
-iqm_station_control_client-3.17.0.dist-info/top_level.txt,sha256=NB4XRfyDS6_wG9gMsyX-9LTU7kWnTQxNvkbzIxGv3-c,4
-iqm_station_control_client-3.17.0.dist-info/RECORD,,
+iqm_station_control_client-4.0.0.dist-info/LICENSE.txt,sha256=R6Q7eUrLyoCQgWYorQ8WJmVmWKYU3dxA3jYUp0wwQAw,11332
+iqm_station_control_client-4.0.0.dist-info/METADATA,sha256=P4V5yKTIBLiuLj4UJvlEoBYQmec-eH4iAqNn6UBJysU,14009
+iqm_station_control_client-4.0.0.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
+iqm_station_control_client-4.0.0.dist-info/top_level.txt,sha256=NB4XRfyDS6_wG9gMsyX-9LTU7kWnTQxNvkbzIxGv3-c,4
+iqm_station_control_client-4.0.0.dist-info/RECORD,,