iqm-station-control-client 11.3.1__py3-none-any.whl → 12.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. iqm/station_control/client/authentication.py +239 -0
  2. iqm/station_control/client/iqm_server/error.py +0 -30
  3. iqm/station_control/client/iqm_server/grpc_utils.py +0 -156
  4. iqm/station_control/client/iqm_server/iqm_server_client.py +0 -489
  5. iqm/station_control/client/list_models.py +16 -11
  6. iqm/station_control/client/qon.py +1 -1
  7. iqm/station_control/client/serializers/run_serializers.py +5 -4
  8. iqm/station_control/client/serializers/struct_serializer.py +1 -1
  9. iqm/station_control/client/station_control.py +140 -154
  10. iqm/station_control/client/utils.py +4 -42
  11. iqm/station_control/interface/models/__init__.py +21 -2
  12. iqm/station_control/interface/models/circuit.py +348 -0
  13. iqm/station_control/interface/models/dynamic_quantum_architecture.py +61 -3
  14. iqm/station_control/interface/models/jobs.py +41 -12
  15. iqm/station_control/interface/models/observation_set.py +28 -4
  16. iqm/station_control/interface/models/run.py +8 -8
  17. iqm/station_control/interface/models/sweep.py +7 -1
  18. iqm/station_control/interface/models/type_aliases.py +1 -2
  19. iqm/station_control/interface/station_control.py +1 -1
  20. {iqm_station_control_client-11.3.1.dist-info → iqm_station_control_client-12.0.1.dist-info}/METADATA +3 -3
  21. iqm_station_control_client-12.0.1.dist-info/RECORD +42 -0
  22. iqm/station_control/client/iqm_server/__init__.py +0 -14
  23. iqm/station_control/client/iqm_server/proto/__init__.py +0 -43
  24. iqm/station_control/client/iqm_server/proto/calibration_pb2.py +0 -48
  25. iqm/station_control/client/iqm_server/proto/calibration_pb2.pyi +0 -45
  26. iqm/station_control/client/iqm_server/proto/calibration_pb2_grpc.py +0 -152
  27. iqm/station_control/client/iqm_server/proto/common_pb2.py +0 -43
  28. iqm/station_control/client/iqm_server/proto/common_pb2.pyi +0 -32
  29. iqm/station_control/client/iqm_server/proto/common_pb2_grpc.py +0 -17
  30. iqm/station_control/client/iqm_server/proto/job_pb2.py +0 -57
  31. iqm/station_control/client/iqm_server/proto/job_pb2.pyi +0 -107
  32. iqm/station_control/client/iqm_server/proto/job_pb2_grpc.py +0 -436
  33. iqm/station_control/client/iqm_server/proto/qc_pb2.py +0 -51
  34. iqm/station_control/client/iqm_server/proto/qc_pb2.pyi +0 -57
  35. iqm/station_control/client/iqm_server/proto/qc_pb2_grpc.py +0 -163
  36. iqm/station_control/client/iqm_server/proto/uuid_pb2.py +0 -39
  37. iqm/station_control/client/iqm_server/proto/uuid_pb2.pyi +0 -26
  38. iqm/station_control/client/iqm_server/proto/uuid_pb2_grpc.py +0 -17
  39. iqm/station_control/client/iqm_server/testing/__init__.py +0 -13
  40. iqm/station_control/client/iqm_server/testing/iqm_server_mock.py +0 -102
  41. iqm_station_control_client-11.3.1.dist-info/RECORD +0 -59
  42. {iqm_station_control_client-11.3.1.dist-info → iqm_station_control_client-12.0.1.dist-info}/LICENSE.txt +0 -0
  43. {iqm_station_control_client-11.3.1.dist-info → iqm_station_control_client-12.0.1.dist-info}/WHEEL +0 -0
  44. {iqm_station_control_client-11.3.1.dist-info → iqm_station_control_client-12.0.1.dist-info}/top_level.txt +0 -0
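The headline change in this major release is the removal of the entire gRPC-based `iqm.station_control.client.iqm_server` subpackage (entries 2–4 and 22–40 above, including the generated proto stubs), alongside a new `authentication.py` and a new circuit model. Any 11.x code that imports the old client breaks on 12.0.1; a hypothetical compatibility guard (module path and class name taken from the deleted file shown below, nothing here is part of the 12.0.1 API) could look like:

```python
# Hypothetical guard for the 11.x -> 12.x transition; the import path comes
# from the deleted module shown below.
try:
    # Available in 11.3.1 and earlier only.
    from iqm.station_control.client.iqm_server.iqm_server_client import IqmServerClient
except ImportError:
    # The iqm_server subpackage was removed in 12.0.x.
    IqmServerClient = None
```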
--- a/iqm/station_control/client/iqm_server/iqm_server_client.py
+++ /dev/null
@@ -1,489 +0,0 @@
- # Copyright 2025 IQM
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- """Client implementation for IQM Server."""
-
- from collections.abc import Callable, Iterable, Sequence
- from contextlib import contextmanager
- import dataclasses
- from io import BytesIO
- import json
- import logging
- from time import sleep
- from typing import Any, TypeVar, cast
- import uuid
- from uuid import UUID
-
- import grpc
- from iqm.models.channel_properties import AWGProperties, ChannelProperties, ReadoutProperties
- import requests
-
- from exa.common.data.setting_node import SettingNode
- from exa.common.data.value import ObservationValue, validate_value
- from iqm.station_control.client.iqm_server import proto
- from iqm.station_control.client.iqm_server.error import IqmServerError
- from iqm.station_control.client.iqm_server.grpc_utils import (
-     create_channel,
-     extract_error,
-     from_proto_uuid,
-     load_all,
-     parse_connection_params,
-     to_datetime,
-     to_proto_uuid,
- )
- from iqm.station_control.client.list_models import DutFieldDataList, DutList
- from iqm.station_control.client.serializers import deserialize_sweep_results, serialize_sweep_job_request
- from iqm.station_control.client.serializers.setting_node_serializer import deserialize_setting_node
- from iqm.station_control.client.serializers.task_serializers import deserialize_sweep_job_request
- from iqm.station_control.client.station_control import _StationControlClientBase
- from iqm.station_control.interface.list_with_meta import ListWithMeta
- from iqm.station_control.interface.models import (
-     DutData,
-     DutFieldData,
-     DynamicQuantumArchitecture,
-     JobData,
-     JobExecutorStatus,
-     JobResult,
-     ObservationData,
-     ObservationDefinition,
-     ObservationLite,
-     ObservationSetData,
-     ObservationSetDefinition,
-     ObservationSetUpdate,
-     ObservationUpdate,
-     QualityMetrics,
-     RunData,
-     RunDefinition,
-     RunLite,
-     SequenceMetadataData,
-     SequenceMetadataDefinition,
-     SequenceResultData,
-     SequenceResultDefinition,
-     StaticQuantumArchitecture,
-     Statuses,
-     SweepData,
-     SweepDefinition,
-     SweepResults,
- )
- from iqm.station_control.interface.models.jobs import JobError
- from iqm.station_control.interface.models.sweep import SweepBase
- from iqm.station_control.interface.models.type_aliases import GetObservationsMode, SoftwareVersionSet, StrUUID
-
- logger = logging.getLogger(__name__)
-
- T = TypeVar("T")
-
-
- class IqmServerClient(_StationControlClientBase):
-     def __init__(
-         self,
-         root_url: str,
-         *,
-         get_token_callback: Callable[[], str] | None = None,
-         client_signature: str | None = None,
-         grpc_channel: grpc.Channel | None = None,
-     ):
-         super().__init__(root_url, get_token_callback=get_token_callback, client_signature=client_signature)
-         self._connection_params = parse_connection_params(root_url)
-         self._cached_resources: dict[str, Any] = {}
-         self._latest_submitted_sweep = None
-         self._channel = grpc_channel or create_channel(self._connection_params, self._get_token_callback)
-         self._current_qc = resolve_current_qc(self._channel, self._connection_params.quantum_computer)
-
-     def __del__(self):
-         try:
-             self._channel.close()
-         except Exception:
-             pass
-
-     def get_about(self) -> dict:
-         return self._get_resource("about", parse_json)
-
-     def get_health(self) -> dict:
-         raise NotImplementedError
-
-     def get_configuration(self) -> dict:
-         return self._get_resource("configuration", parse_json)
-
-     def get_exa_configuration(self) -> str:
-         raise NotImplementedError
-
-     def get_or_create_software_version_set(self, software_version_set: SoftwareVersionSet) -> int:
-         raise NotImplementedError
-
-     def get_settings(self) -> SettingNode:
-         return self._get_resource("settings", deserialize_setting_node).copy()
-
-     def get_chip_design_record(self, dut_label: str) -> dict:
-         return self._get_resource(f"chip-design-records/{dut_label}", parse_json)
-
-     def get_channel_properties(self) -> dict[str, ChannelProperties]:
-         def bytes_to_dict(data: bytes) -> dict[str, ChannelProperties]:
-             """Deserialize bytes to a dictionary of channel property values."""
-             json_props: dict[str, dict] = json.loads(data.decode("utf-8"))
-             # Convert JSON representation to pythonic ChannelProperties
-             return _convert_channel_property_json_to_python(json_props)
-
-         channel_props = self._get_resource("channel-properties", bytes_to_dict)
-         return channel_props
-
-     def sweep(self, sweep_definition: SweepDefinition) -> dict:
-         with wrap_error("Job submission failed"):
-             jobs = proto.JobsStub(self._channel)
-             job: proto.JobV1 = jobs.SubmitJobV1(
-                 proto.SubmitJobRequestV1(
-                     qc_id=self._current_qc.id,
-                     type=proto.JobType.PULSE,
-                     payload=serialize_sweep_job_request(sweep_definition, queue_name="sweeps"),
-                     use_timeslot=self._connection_params.use_timeslot,
-                 )
-             )
-             # Optimization: we know that in most of the cases the submitted sweep is queried
-             # right after submitting it so we can cache reference to the submitted sweep here
-             # to avoid extra request to the server
-             job_id = from_proto_uuid(job.id)
-             self._latest_submitted_sweep = dataclasses.replace(sweep_definition, sweep_id=job_id)  # type: ignore[assignment]
-             return {
-                 "job_id": str(job_id),
-             }
-
-     def get_sweep(self, sweep_id: UUID) -> SweepData:
-         with wrap_error("Job loading failed"):
-             if isinstance(sweep_id, str):
-                 sweep_id = uuid.UUID(sweep_id)
-
-             jobs = proto.JobsStub(self._channel)
-             job_lookup = proto.JobLookupV1(id=to_proto_uuid(sweep_id))
-             job: proto.JobV1 = jobs.GetJobV1(job_lookup)
-             # IQM server job does not include any details about the sweep properties so we need to
-             # construct the resulting sweep data using the payload (= input sweep) and metadata
-             # from the IQM server job
-             sweep = self._get_cached_sweep(sweep_id) or payload_to_sweep(load_all(jobs.GetJobPayloadV1(job_lookup)))
-             return SweepData(
-                 created_timestamp=to_datetime(job.created_at),
-                 modified_timestamp=to_datetime(job.updated_at),
-                 begin_timestamp=to_datetime(job.execution_started_at) if job.HasField("execution_started_at") else None,
-                 end_timestamp=to_datetime(job.execution_ended_at) if job.HasField("execution_ended_at") else None,
-                 job_status=to_job_executor_status(job.status),
-                 # Sweep definition is a subclass of SweepBase so we can just copy all SweepBase fields
-                 # from the input sweep to the sweep data
-                 **{f.name: getattr(sweep, f.name) for f in dataclasses.fields(SweepBase)},
-             )
-
-     def delete_sweep(self, sweep_id: UUID) -> None:
-         raise NotImplementedError
-
-     def get_sweep_results(self, sweep_id: UUID) -> SweepResults:
-         with wrap_error("Job result loading failed"):
-             jobs = proto.JobsStub(self._channel)
-             data_chunks = jobs.GetJobResultsV1(proto.JobLookupV1(id=to_proto_uuid(sweep_id)))
-             return deserialize_sweep_results(load_all(data_chunks))
-
-     def run(
-         self,
-         run_definition: RunDefinition,
-         update_progress_callback: Callable[[Statuses], None] | None = None,
-         wait_job_completion: bool = True,
-     ) -> bool:
-         raise NotImplementedError
-
-     def get_run(self, run_id: UUID) -> RunData:
-         raise NotImplementedError
-
-     def query_runs(self, **kwargs) -> ListWithMeta[RunLite]:  # type: ignore[type-arg]
-         raise NotImplementedError
-
-     def create_observations(
-         self, observation_definitions: Sequence[ObservationDefinition]
-     ) -> ListWithMeta[ObservationData]:  # type: ignore[type-arg]
-         raise NotImplementedError
-
-     def get_observations(
-         self,
-         *,
-         mode: GetObservationsMode,
-         dut_label: str | None = None,
-         dut_field: str | None = None,
-         tags: list[str] | None = None,
-         invalid: bool | None = False,
-         run_ids: list[UUID] | None = None,
-         sequence_ids: list[UUID] | None = None,
-         limit: int | None = None,
-     ) -> list[ObservationData]:
-         raise NotImplementedError
-
-     def query_observations(self, **kwargs) -> ListWithMeta[ObservationData]:  # type: ignore[type-arg]
-         raise NotImplementedError
-
-     def update_observations(self, observation_updates: Sequence[ObservationUpdate]) -> list[ObservationData]:
-         raise NotImplementedError
-
-     def query_observation_sets(self, **kwargs) -> ListWithMeta[ObservationSetData]:  # type: ignore[type-arg]
-         raise NotImplementedError
-
-     def create_observation_set(self, observation_set_definition: ObservationSetDefinition) -> ObservationSetData:
-         raise NotImplementedError
-
-     def get_observation_set(self, observation_set_id: UUID) -> ObservationSetData:
-         raise NotImplementedError
-
-     def update_observation_set(self, observation_set_update: ObservationSetUpdate) -> ObservationSetData:
-         raise NotImplementedError
-
-     def finalize_observation_set(self, observation_set_id: UUID) -> None:
-         raise NotImplementedError
-
-     def get_observation_set_observations(self, observation_set_id: UUID) -> list[ObservationLite]:
-         raise NotImplementedError
-
-     def get_default_calibration_set(self) -> ObservationSetData:
-         raise NotImplementedError
-
-     def get_default_calibration_set_observations(self) -> list[ObservationLite]:
-         raise NotImplementedError
-
-     def get_dynamic_quantum_architecture(self, calibration_set_id: UUID) -> DynamicQuantumArchitecture:
-         response = self._send_request(requests.get, f"api/v1/calibration/{calibration_set_id}/gates")
-         return DynamicQuantumArchitecture.model_validate_json(response.text)
-
-     def get_default_dynamic_quantum_architecture(self) -> DynamicQuantumArchitecture:
-         raise NotImplementedError
-
-     def get_default_calibration_set_quality_metrics(self) -> QualityMetrics:
-         raise NotImplementedError
-
-     def get_calibration_set_quality_metrics(self, calibration_set_id: UUID) -> QualityMetrics:
-         raise NotImplementedError
-
-     def get_duts(self) -> list[DutData]:
-         return self._get_resource("duts", lambda data: DutList.model_validate(parse_json(data)))  # type: ignore[arg-type,return-value]
-
-     def get_dut_fields(self, dut_label: str) -> list[DutFieldData]:
-         return self._get_resource(
-             f"dut-fields/{dut_label}",
-             lambda data: DutFieldDataList.model_validate(parse_json(data)),  # type: ignore[arg-type,return-value]
-         )
-
-     def query_sequence_metadatas(self, **kwargs) -> ListWithMeta[SequenceMetadataData]:  # type: ignore[type-arg]
-         raise NotImplementedError
-
-     def create_sequence_metadata(
-         self, sequence_metadata_definition: SequenceMetadataDefinition
-     ) -> SequenceMetadataData:
-         raise NotImplementedError
-
-     def save_sequence_result(self, sequence_result_definition: SequenceResultDefinition) -> SequenceResultData:
-         raise NotImplementedError
-
-     def get_sequence_result(self, sequence_id: UUID) -> SequenceResultData:
-         raise NotImplementedError
-
-     def get_static_quantum_architecture(self, dut_label: str) -> StaticQuantumArchitecture:
-         response = self._send_request(requests.get, "api/v1/quantum-architecture")
-         return StaticQuantumArchitecture.model_validate_json(response.text)
-
-     def get_job(self, job_id: StrUUID) -> JobData:
-         with wrap_error("Job loading failed"):
-             jobs = proto.JobsStub(self._channel)
-             job: proto.JobV1 = jobs.GetJobV1(proto.JobLookupV1(id=to_proto_uuid(job_id)))
-             return JobData(
-                 job_id=from_proto_uuid(job.id),
-                 job_status=to_job_executor_status(job.status),
-                 job_result=JobResult(
-                     job_id=from_proto_uuid(job.id),
-                     parallel_sweep_progress=[],
-                     interrupted=False,
-                 ),
-                 job_error=JobError(full_error_log=job.error, user_error_message=job.error)
-                 if job.HasField("error")
-                 else None,
-                 position=job.queue_position if job.HasField("queue_position") else None,
-             )
-
-     def abort_job(self, job_id: StrUUID) -> None:
-         with wrap_error("Job cancellation failed"):
-             jobs = proto.JobsStub(self._channel)
-             jobs.CancelJobV1(proto.JobLookupV1(id=to_proto_uuid(job_id)))
-
-     def get_calibration_set_values(self, calibration_set_id: StrUUID) -> dict[str, ObservationValue]:
-         with wrap_error("Calibration set loading failed"):
-             calibrations = proto.CalibrationsStub(self._channel)
-             data_chunks = calibrations.GetFullCalibrationDataV1(
-                 proto.CalibrationLookupV1(
-                     id=to_proto_uuid(calibration_set_id),
-                 )
-             )
-             _, cal_set_values = parse_calibration_set(load_all(data_chunks))
-             return cal_set_values
-
-     def get_latest_calibration_set_id(self, dut_label: str) -> uuid.UUID:
-         with wrap_error("Calibration set metadata loading failed"):
-             calibrations = proto.CalibrationsStub(self._channel)
-             metadata: proto.CalibrationMetadataV1 = calibrations.GetLatestQuantumComputerCalibrationV1(
-                 proto.LatestQuantumComputerCalibrationLookupV1(
-                     qc_id=self._current_qc.id,
-                 )
-             )
-             if metadata.dut_label != dut_label:
-                 raise ValueError(f"No calibration set for dut_label = {dut_label}")
-             return from_proto_uuid(metadata.id)
-
-     def _wait_job_completion(
-         self,
-         task_id: str,
-         update_progress_callback: Callable[[Statuses], None] | None,
-     ) -> bool:
-         with wrap_error("Job subscription failed"):
-             try:
-                 notify = update_progress_callback or (lambda _: None)
-                 job_id = uuid.UUID(task_id)
-                 initial_queue_position = None
-                 status = None
-                 # SubscribeToJobV1 runs until job reaches its final status (completed, failed, interrupted)
-                 job_events = subscribe_to_job_events(self._channel, job_id)
-                 for job in job_events:
-                     status = job.status
-                     if status == proto.JobStatus.IN_QUEUE:
-                         if initial_queue_position is None:
-                             initial_queue_position = job.queue_position
-                         queue_progress = initial_queue_position - job.queue_position
-                         notify([("Progress in queue", queue_progress, initial_queue_position)])
-                 # In case of success, mark progress bar to 100% (looks nicer)
-                 if initial_queue_position is not None and status == proto.JobStatus.COMPLETED:
-                     notify([("Progress in queue", initial_queue_position, initial_queue_position)])
-                 return False
-             except KeyboardInterrupt:
-                 return True
-
-     def _get_cached_sweep(self, sweep_id: uuid.UUID) -> SweepDefinition | None:
-         latest_submitted = self._latest_submitted_sweep
-         if latest_submitted and latest_submitted.sweep_id == sweep_id:
-             return latest_submitted
-         return None
-
-     def _get_resource(self, resource_name: str, deserialize: Callable[[bytes], T]) -> T:
-         with wrap_error(f"Failed to load QC resource '{resource_name}'"):
-             if (cached := self._cached_resources.get(resource_name)) is not None:
-                 return cached
-             qcs = proto.QuantumComputersStub(self._channel)
-             data_chunks = qcs.GetQuantumComputerResourceV1(
-                 proto.QuantumComputerResourceLookupV1(
-                     qc_id=self._current_qc.id,
-                     resource_name=resource_name,
-                 )
-             )
-             resource = deserialize(load_all(data_chunks))
-             self._cached_resources[resource_name] = resource
-             return resource
-
-
- def resolve_current_qc(channel: grpc.Channel, alias: str) -> proto.QuantumComputerV1:
-     qcs = proto.QuantumComputersStub(channel)
-     qc_list: proto.QuantumComputersListV1 = qcs.ListQuantumComputersV1(proto.ListQuantumComputerFiltersV1())
-     for qc in qc_list.items:
-         if qc.alias == alias:
-             return qc
-     raise ValueError(f"Quantum computer '{alias}' does not exist")
-
-
- def subscribe_to_job_events(channel: grpc.Channel, job_id: uuid.UUID) -> Iterable[proto.JobV1]:
-     jobs = proto.JobsStub(channel)
-     attempts = 1
-     while True:
-         try:
-             events = jobs.SubscribeToJobV1(proto.JobLookupV1(id=to_proto_uuid(job_id)))
-             for event in events:
-                 job_event = cast(proto.JobEventV1, event)
-                 if job_event.HasField("update"):
-                     yield job_event.update
-             return
-         except grpc.RpcError as e:
-             # Server may cancel subscription due to e.g. restarts, in which case we can just retry after some waiting
-             error = extract_error(e)
-             if error.error_code == "server_cancel" and attempts <= 10:
-                 attempts += 1
-                 sleep(5)
-                 continue
-             raise e
-
-
- def _convert_channel_property_json_to_python(channel_property_json: dict[str, dict]) -> dict[str, ChannelProperties]:
-     """Convert the JSON representation of channel properties to a dictionary containing pythonic ChannelProperties."""
-     channel_properties: dict[str, ChannelProperties] = {}
-     for name, props in channel_property_json.items():
-         if "fast_feedback_sources" in props:
-             channel_properties[name] = AWGProperties(
-                 sampling_rate=props.get("sampling_rate"),
-                 instruction_duration_granularity=props.get("instruction_duration_granularity"),
-                 instruction_duration_min=props.get("instruction_duration_min"),
-                 fast_feedback_sources=props.get("fast_feedback_sources", []),
-                 compatible_instructions=props.get("compatible_instructions", []),
-                 local_oscillator=props.get("local_oscillator"),
-                 mixer_correction=props.get("mixer_correction"),
-             )
-         if "integration_start_dead_time" in props:
-             channel_properties[name] = ReadoutProperties(
-                 sampling_rate=props.get("sampling_rate"),
-                 instruction_duration_granularity=props.get("instruction_duration_granularity"),
-                 instruction_duration_min=props.get("instruction_duration_min"),
-                 compatible_instructions=props.get("compatible_instructions", []),
-                 integration_start_dead_time=props.get("integration_start_dead_time"),
-                 integration_stop_dead_time=props.get("integration_stop_dead_time"),
-             )
-     return channel_properties
-
-
- def parse_calibration_set(cal_set_data: bytes) -> tuple[uuid.UUID, dict[str, ObservationValue]]:
-     # IQM server calibration sets are in cocos calibration set JSON format, we can get
-     # both id and observations from it
-     cal_set = parse_json(cal_set_data)
-     cal_set_id = cal_set["calibration_set_id"]
-     observations = cal_set.get("observations", {})
-     cal_set_values = {k: validate_value(v["value"]) for k, v in observations.items()}
-     return cal_set_id, cal_set_values
-
-
- def payload_to_sweep(job_payload: bytes) -> SweepDefinition:
-     sweep, _ = deserialize_sweep_job_request(job_payload)
-     return sweep
-
-
- def to_job_executor_status(job_status: proto.JobStatus) -> JobExecutorStatus:
-     match job_status:
-         case proto.JobStatus.IN_QUEUE:
-             return JobExecutorStatus.PENDING_EXECUTION
-         case proto.JobStatus.EXECUTING:
-             return JobExecutorStatus.EXECUTION_STARTED
-         case proto.JobStatus.FAILED:
-             return JobExecutorStatus.FAILED
-         case proto.JobStatus.COMPLETED:
-             return JobExecutorStatus.READY
-         case proto.JobStatus.INTERRUPTED:
-             return JobExecutorStatus.ABORTED
-         case proto.JobStatus.CANCELLED:
-             return JobExecutorStatus.ABORTED
-     raise ValueError(f"Unknown job status: '{job_status}'")
-
-
- def parse_json(data: bytes) -> Any:
-     return json.load(BytesIO(data))
-
-
- @contextmanager
- def wrap_error(title: str):  # noqa: ANN201
-     try:
-         yield
-     except grpc.RpcError as e:
-         raise extract_error(e, title) from e
-     except Exception as e:
-         raise IqmServerError(message=f"{title}: {e}", status_code=str(grpc.StatusCode.INTERNAL.name)) from e
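One detail of the deleted client worth noting: `subscribe_to_job_events` re-subscribes when the server cancels the event stream (e.g. during a restart), retrying up to 10 times with a 5-second pause. A self-contained sketch of that loop, with a stand-in exception in place of `grpc.RpcError` plus `extract_error`:

```python
from collections.abc import Callable, Iterable, Iterator
from time import sleep
from typing import TypeVar

T = TypeVar("T")

class ServerCancelled(Exception):
    """Stand-in for a grpc.RpcError whose extracted error_code is 'server_cancel'."""

def resubscribe(open_stream: Callable[[], Iterable[T]], max_retries: int = 10, wait_s: float = 5.0) -> Iterator[T]:
    """Yield events from open_stream(), re-opening it on server-side cancels."""
    retries = 0
    while True:
        try:
            yield from open_stream()
            return  # stream ended normally: the job reached a final status
        except ServerCancelled:
            if retries >= max_retries:
                raise
            retries += 1
            sleep(wait_s)
```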
--- a/iqm/station_control/client/list_models.py
+++ b/iqm/station_control/client/list_models.py
@@ -32,6 +32,7 @@ from iqm.station_control.interface.models import (
      RunLite,
      SequenceMetadataData,
      StaticQuantumArchitecture,
+     TimelineEntry,
  )
  from iqm.station_control.interface.pydantic_base import PydanticBase
  
@@ -70,41 +71,45 @@ class ListModel(RootModel):
      )
  
  
- class DutList(ListModel):
+ class DutList(ListModel):  # noqa: D101
      root: list[DutData]
  
  
- class DutFieldDataList(ListModel):
+ class DutFieldDataList(ListModel):  # noqa: D101
      root: list[DutFieldData]
  
  
- class ObservationDataList(ListModel):
+ class ObservationDataList(ListModel):  # noqa: D101
      root: list[ObservationData]
  
  
- class ObservationDefinitionList(ListModel):
+ class ObservationDefinitionList(ListModel):  # noqa: D101
      root: list[ObservationDefinition]
  
  
- class ObservationLiteList(ListModel):
+ class ObservationLiteList(ListModel):  # noqa: D101
      root: list[ObservationLite]
  
  
- class ObservationUpdateList(ListModel):
+ class ObservationUpdateList(ListModel):  # noqa: D101
      root: list[ObservationUpdate]
  
  
- class ObservationSetDataList(ListModel):
+ class ObservationSetDataList(ListModel):  # noqa: D101
      root: list[ObservationSetData]
  
  
- class SequenceMetadataDataList(ListModel):
+ class RunLiteList(ListModel):  # noqa: D101
+     root: list[RunLite]
+ 
+ 
+ class SequenceMetadataDataList(ListModel):  # noqa: D101
      root: list[SequenceMetadataData]
  
  
- class StaticQuantumArchitectureList(ListModel):
+ class StaticQuantumArchitectureList(ListModel):  # noqa: D101
      root: list[StaticQuantumArchitecture]
  
  
- class RunLiteList(ListModel):
-     root: list[RunLite]
+ class TimelineEntryList(ListModel):  # noqa: D101
+     root: list[TimelineEntry]
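All of these list models are thin pydantic `RootModel` wrappers whose `root` field is a typed list, and the new `TimelineEntryList` follows the same shape. A self-contained sketch of the pattern (the `label` field on the stand-in item type is an assumption, not the real `DutData` schema):

```python
from pydantic import BaseModel, RootModel

class DutData(BaseModel):
    """Stand-in for the real interface model; its fields are assumptions."""
    label: str

class DutList(RootModel):
    root: list[DutData]

# Validates a raw JSON-ish list into typed items, as get_duts() did above.
duts = DutList.model_validate([{"label": "M156_W312"}])
assert duts.root[0].label == "M156_W312"
```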
--- a/iqm/station_control/client/qon.py
+++ b/iqm/station_control/client/qon.py
@@ -23,7 +23,7 @@ from typing import Annotated, Any, Final, TypeAlias
  from pydantic import Field
  from pydantic.dataclasses import dataclass
  
- from iqm.station_control.interface.models.observation import ObservationBase
+ from iqm.station_control.interface.models import ObservationBase
  
  logger = logging.getLogger(__name__)
  
--- a/iqm/station_control/client/serializers/run_serializers.py
+++ b/iqm/station_control/client/serializers/run_serializers.py
@@ -40,8 +40,8 @@ def serialize_run_definition(run_definition: RunDefinition) -> RunDefinitionProt
          username=run_definition.username,
          experiment_name=run_definition.experiment_name,
          experiment_label=run_definition.experiment_label,
-         options=serialize_struct(run_definition.options),  # type: ignore[arg-type]
-         additional_run_properties=serialize_struct(run_definition.additional_run_properties),  # type: ignore[arg-type]
+         options=serialize_struct(run_definition.options),
+         additional_run_properties=serialize_struct(run_definition.additional_run_properties),
          software_version_set_id=run_definition.software_version_set_id,
          components=run_definition.components,
          default_data_parameters=run_definition.default_data_parameters,
@@ -50,8 +50,9 @@ def serialize_run_definition(run_definition: RunDefinition) -> RunDefinitionProt
      run_definition_proto.sweep_definition_payload.Pack(
          serialize_sweep_definition(run_definition.sweep_definition), type_url_prefix="iqm-data-definitions"
      )
-     for key, sweep in run_definition.hard_sweeps.items():  # type: ignore[union-attr]
+     for key, sweep in run_definition.hard_sweeps.items():
          run_definition_proto.hard_sweeps[key].CopyFrom(proto_serialization.nd_sweep.pack(sweep, minimal=False))
+ 
      return run_definition_proto
  
  
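The `Pack(..., type_url_prefix="iqm-data-definitions")` call above is the standard `google.protobuf.Any` packing with a custom type-URL prefix. A runnable sketch with a `Struct` payload standing in for the real serialized sweep definition:

```python
from google.protobuf import any_pb2, struct_pb2

payload = struct_pb2.Struct()
payload["experiment_name"] = "sweep"  # placeholder content

packed = any_pb2.Any()
packed.Pack(payload, type_url_prefix="iqm-data-definitions")
print(packed.type_url)  # iqm-data-definitions/google.protobuf.Struct

# Unpacking checks the message type encoded in the URL suffix, not the prefix.
restored = struct_pb2.Struct()
assert packed.Unpack(restored)
```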
@@ -94,7 +95,7 @@ def serialize_run_data(run_data: RunData) -> dict:
          "options": run_data.options,
          "additional_run_properties": run_data.additional_run_properties,
          "software_version_set_id": run_data.software_version_set_id,
-         "hard_sweeps": {key: encode_nd_sweeps(value) for key, value in run_data.hard_sweeps.items()},  # type: ignore[union-attr]
+         "hard_sweeps": {key: encode_nd_sweeps(value) for key, value in run_data.hard_sweeps.items()},
          "components": run_data.components,
          "default_data_parameters": run_data.default_data_parameters,
          "default_sweep_parameters": run_data.default_sweep_parameters,
--- a/iqm/station_control/client/serializers/struct_serializer.py
+++ b/iqm/station_control/client/serializers/struct_serializer.py
@@ -33,7 +33,7 @@ def deserialize_struct(proto: pb.Struct) -> dict:
      return {key: _deserialize_value(value) for key, value in proto.fields.items()}
  
  
- def _serialize_value(value: None | float | str | bool | dict | list | int) -> pb.Value:
+ def _serialize_value(value: None | float | str | bool | dict | list) -> pb.Value:
      """Serialize a value into a Value protobuf representation."""
      if value is None:
          return pb.Value(null_value=True)
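Dropping `int` from the union is consistent with PEP 484's numeric tower, where a `float` annotation already accepts `int`, and with protobuf's `Struct`/`Value` model, which has no integer kind: numbers are carried as `number_value` doubles. A self-contained sketch of that scalar dispatch (an illustration, not this module's actual implementation; `dict`/`list` handling is omitted):

```python
from google.protobuf import struct_pb2 as pb

def to_value(value: None | float | str | bool) -> pb.Value:
    """Illustrative scalar dispatch into a protobuf Value."""
    if value is None:
        return pb.Value(null_value=pb.NULL_VALUE)
    if isinstance(value, bool):  # must precede the numeric check: bool is an int subclass
        return pb.Value(bool_value=value)
    if isinstance(value, (int, float)):  # ints travel as doubles, hence no separate int branch
        return pb.Value(number_value=value)
    return pb.Value(string_value=value)

assert to_value(3).number_value == 3.0
assert to_value(True).bool_value is True
```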