iqm-station-control-client 3.12__py3-none-any.whl → 3.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. iqm/station_control/client/iqm_server/__init__.py +14 -0
  2. iqm/station_control/client/iqm_server/error.py +30 -0
  3. iqm/station_control/client/iqm_server/grpc_utils.py +154 -0
  4. iqm/station_control/client/iqm_server/iqm_server_client.py +332 -0
  5. iqm/station_control/client/iqm_server/meta_class.py +38 -0
  6. iqm/station_control/client/iqm_server/proto/__init__.py +43 -0
  7. iqm/station_control/client/iqm_server/proto/calibration_pb2.py +48 -0
  8. iqm/station_control/client/iqm_server/proto/calibration_pb2.pyi +45 -0
  9. iqm/station_control/client/iqm_server/proto/calibration_pb2_grpc.py +152 -0
  10. iqm/station_control/client/iqm_server/proto/common_pb2.py +43 -0
  11. iqm/station_control/client/iqm_server/proto/common_pb2.pyi +32 -0
  12. iqm/station_control/client/iqm_server/proto/common_pb2_grpc.py +17 -0
  13. iqm/station_control/client/iqm_server/proto/job_pb2.py +57 -0
  14. iqm/station_control/client/iqm_server/proto/job_pb2.pyi +107 -0
  15. iqm/station_control/client/iqm_server/proto/job_pb2_grpc.py +436 -0
  16. iqm/station_control/client/iqm_server/proto/qc_pb2.py +51 -0
  17. iqm/station_control/client/iqm_server/proto/qc_pb2.pyi +57 -0
  18. iqm/station_control/client/iqm_server/proto/qc_pb2_grpc.py +163 -0
  19. iqm/station_control/client/iqm_server/proto/uuid_pb2.py +39 -0
  20. iqm/station_control/client/iqm_server/proto/uuid_pb2.pyi +26 -0
  21. iqm/station_control/client/iqm_server/proto/uuid_pb2_grpc.py +17 -0
  22. iqm/station_control/client/iqm_server/testing/__init__.py +13 -0
  23. iqm/station_control/client/iqm_server/testing/iqm_server_mock.py +102 -0
  24. iqm/station_control/client/serializers/task_serializers.py +28 -1
  25. iqm/station_control/client/station_control.py +77 -1
  26. iqm/station_control/client/utils.py +16 -1
  27. {iqm_station_control_client-3.12.dist-info → iqm_station_control_client-3.14.dist-info}/METADATA +2 -1
  28. iqm_station_control_client-3.14.dist-info/RECORD +52 -0
  29. iqm_station_control_client-3.12.dist-info/RECORD +0 -29
  30. {iqm_station_control_client-3.12.dist-info → iqm_station_control_client-3.14.dist-info}/LICENSE.txt +0 -0
  31. {iqm_station_control_client-3.12.dist-info → iqm_station_control_client-3.14.dist-info}/WHEEL +0 -0
  32. {iqm_station_control_client-3.12.dist-info → iqm_station_control_client-3.14.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,14 @@
1
+ # Copyright 2025 IQM
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """StationControlClient implementation for IQM Server"""
@@ -0,0 +1,30 @@
1
+ # Copyright 2025 IQM
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from exa.common.errors.server_errors import StationControlError
16
+
17
+
18
class IqmServerError(StationControlError):
    """Error raised when an IQM Server operation fails.

    Carries the gRPC status code, plus an optional server-supplied error
    code and structured details, in addition to the human-readable message.
    """

    def __init__(self, message: str, status_code: str, error_code: str | None = None, details: dict | None = None):
        super().__init__(message)
        self.status_code = status_code
        self.error_code = error_code
        self.details = details

    def __str__(self):
        # Include the details section only when there is something to show.
        extra = f", details = {self.details}" if self.details else ""
        return f"{self.message} (status_code = {self.status_code}, error_code = {self.error_code}{extra})"
@@ -0,0 +1,154 @@
1
+ # Copyright 2025 IQM
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Internal utility functions used by IqmServerClient."""
15
+
16
+ from collections.abc import Callable, Iterable
17
+ from dataclasses import dataclass
18
+ from datetime import datetime
19
+ import uuid
20
+
21
+ from google.protobuf import json_format, struct_pb2, timestamp_pb2
22
+ import grpc
23
+ from grpc import Compression
24
+ from pydantic import HttpUrl
25
+
26
+ from iqm.station_control.client.iqm_server import proto
27
+ from iqm.station_control.client.iqm_server.error import IqmServerError
28
+
29
+
30
class ClientCallDetails(grpc.ClientCallDetails):
    """Mutable copy of a :class:`grpc.ClientCallDetails` instance.

    gRPC call detail objects handed to interceptors are effectively
    read-only; this copy lets an interceptor modify e.g. the metadata
    before forwarding the call.
    """

    def __init__(self, details):
        self.method = details.method
        # Copy metadata into a fresh list so callers can append to it.
        self.metadata = list(details.metadata or [])
        self.timeout = details.timeout
        self.credentials = details.credentials
        self.wait_for_ready = details.wait_for_ready
        self.compression = details.compression
38
+
39
+
40
class ApiTokenAuth(grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor):
    """Client interceptor that attaches a bearer token to every outgoing call.

    The token is fetched from ``get_token_callback`` just before each
    request, so rotating/short-lived tokens are picked up transparently.
    """

    def __init__(self, get_token_callback: Callable[[], str]):
        self.get_token_callback = get_token_callback

    def _add_auth_header(self, client_call_details) -> ClientCallDetails:
        """Return a mutable copy of the call details with an authorization header appended."""
        with_auth = ClientCallDetails(client_call_details)
        bearer = self.get_token_callback()
        with_auth.metadata.append(("authorization", f"Bearer {bearer}"))
        return with_auth

    def intercept_unary_unary(self, continuation, client_call_details, request):
        return continuation(self._add_auth_header(client_call_details), request)

    def intercept_unary_stream(self, continuation, client_call_details, request):
        return continuation(self._add_auth_header(client_call_details), request)
55
+
56
+
57
@dataclass(frozen=True, kw_only=True)
class ConnectionParameters:
    """Connection parameters parsed from a quantum computer URL."""

    # gRPC endpoint in "host:port" form (the "cocos." prefix stripped).
    server_address: str
    # True when the URL scheme was https, i.e. a TLS channel should be used.
    is_secure: bool
    # Alias of the target quantum computer (last URL path segment).
    quantum_computer: str
    # True when the URL carried the ":timeslot" suffix.
    use_timeslot: bool
63
+
64
+
65
def parse_connection_params(qc_url: str) -> ConnectionParameters:
    """Parse a quantum computer URL into gRPC connection parameters.

    Raises:
        ValueError: if the URL contains non-ASCII characters or has no device name.
    """
    # Security measure: mitigate UTF-8 read order control character
    # exploits by allowing only ASCII urls
    if not qc_url.isascii():
        raise ValueError("Invalid quantum computer URL")

    # IQM Server QC urls are currently of the form
    # "https://cocos.<server_base_url>/<qc_name>[:timeslot]".
    # In the future, the "cocos." subdomain will be dropped; this parsing
    # logic must keep working with both url formats.
    url = HttpUrl(qc_url)
    use_timeslot = qc_url.endswith(":timeslot")
    last_segment = (url.path or "").split("/")[-1]
    qc_name = last_segment.removesuffix(":timeslot")
    if not qc_name:
        raise ValueError("Invalid quantum computer URL: device name is missing")

    is_secure = url.scheme == "https"
    hostname = (url.host or "").removeprefix("cocos.")
    if url.port:
        port = url.port
    else:
        port = 443 if is_secure else 80

    return ConnectionParameters(
        server_address=f"{hostname}:{port}",
        is_secure=is_secure,
        quantum_computer=qc_name,
        use_timeslot=use_timeslot,
    )
90
+
91
+
92
def create_channel(
    connection_params: ConnectionParameters,
    get_token_callback: Callable[[], str] | None = None,
    enable_compression: bool = True,
) -> grpc.Channel:
    """Create a gRPC channel (TLS when the URL was https, token-authenticated when a callback is given)."""
    compression = Compression.Gzip if enable_compression else None
    # Keepalive settings. Let's try to parametrize this at least when
    # we're merging station-control-client and iqm-client.
    options = [
        ("grpc.keepalive_time_ms", 5000),
        ("grpc.keepalive_permit_without_calls", 1),
        ("grpc.http2.max_pings_without_data", 0),
        ("grpc.keepalive_timeout_ms", 1000),
    ]
    address = connection_params.server_address
    if connection_params.is_secure:
        channel = grpc.secure_channel(
            address, credentials=grpc.ssl_channel_credentials(), options=options, compression=compression
        )
    else:
        channel = grpc.insecure_channel(address, options=options, compression=compression)
    if get_token_callback is not None:
        channel = grpc.intercept_channel(channel, ApiTokenAuth(get_token_callback))
    return channel
116
+
117
+
118
def to_proto_uuid(value: uuid.UUID) -> proto.Uuid:
    """Convert a Python UUID into its protobuf representation (raw 16-byte form)."""
    return proto.Uuid(raw=value.bytes)
120
+
121
+
122
def from_proto_uuid(value: proto.Uuid) -> uuid.UUID:
    """Convert a protobuf UUID (string or raw-bytes oneof variant) into a Python UUID."""
    if value.WhichOneof("data") != "str":
        return uuid.UUID(bytes=value.raw)
    return uuid.UUID(hex=value.str)
126
+
127
+
128
def to_datetime(timestamp: timestamp_pb2.Timestamp) -> datetime:
    """Convert a protobuf Timestamp into a Python datetime (naive, per protobuf's default)."""
    return timestamp.ToDatetime()
130
+
131
+
132
def load_all(chunks: Iterable[proto.DataChunk]) -> bytes:
    """Concatenate the payloads of a stream of data chunks into a single bytes object."""
    return b"".join(chunk.data for chunk in chunks)
137
+
138
+
139
def extract_error(error: grpc.RpcError, title: str | None = None) -> IqmServerError:
    """Convert a raised gRPC error into an :class:`IqmServerError`.

    Args:
        error: the gRPC error to convert.
        title: optional context string prefixed to the error message.

    Returns:
        IqmServerError carrying the gRPC status code, the server-provided
        ``error_code`` metadata entry (if any), and decoded error details.
    """
    message = error.details()
    status_code = str(error.code().name)
    # dict() keeps the last occurrence, so trailing metadata wins on key collisions.
    metadata = dict(list(error.initial_metadata()) + list(error.trailing_metadata()))
    error_code = str(metadata.get("error_code")) if "error_code" in metadata else None
    details = None
    if details_bin := metadata.get("grpc-status-details-bin"):
        value_proto = struct_pb2.Value()
        value_proto.ParseFromString(details_bin)
        # Fix: IqmServerError declares `details: dict | None`, but MessageToJson
        # returns a JSON *string*; MessageToDict yields the decoded Python object.
        details = json_format.MessageToDict(value_proto)
    return IqmServerError(
        message=f"{title}: {message}" if title else message,
        status_code=status_code,
        error_code=error_code,
        details=details,
    )
@@ -0,0 +1,332 @@
1
+ # Copyright 2025 IQM
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """StationControlClient implementation for IQM Server"""
15
+
16
+ from collections.abc import Callable, Iterable
17
+ from contextlib import contextmanager
18
+ import dataclasses
19
+ from io import BytesIO
20
+ import json
21
+ import logging
22
+ from time import sleep
23
+ from typing import Any, TypeVar, cast
24
+ import uuid
25
+
26
+ import grpc
27
+ from iqm.models.channel_properties import ChannelProperties
28
+
29
+ from exa.common.data.setting_node import SettingNode
30
+ from exa.common.data.value import ObservationValue, validate_value
31
+ from iqm.station_control.client.iqm_server import proto
32
+ from iqm.station_control.client.iqm_server.error import IqmServerError
33
+ from iqm.station_control.client.iqm_server.grpc_utils import (
34
+ create_channel,
35
+ extract_error,
36
+ from_proto_uuid,
37
+ load_all,
38
+ parse_connection_params,
39
+ to_datetime,
40
+ to_proto_uuid,
41
+ )
42
+ from iqm.station_control.client.iqm_server.meta_class import IqmServerClientMeta
43
+ from iqm.station_control.client.list_models import DutFieldDataList, DutList
44
+ from iqm.station_control.client.serializers import deserialize_sweep_results, serialize_sweep_task_request
45
+ from iqm.station_control.client.serializers.channel_property_serializer import unpack_channel_properties
46
+ from iqm.station_control.client.serializers.setting_node_serializer import deserialize_setting_node
47
+ from iqm.station_control.client.serializers.task_serializers import deserialize_sweep_task_request
48
+ from iqm.station_control.client.station_control import StationControlClient
49
+ from iqm.station_control.interface.models import (
50
+ DutData,
51
+ DutFieldData,
52
+ Statuses,
53
+ SweepData,
54
+ SweepDefinition,
55
+ SweepResults,
56
+ SweepStatus,
57
+ )
58
+ from iqm.station_control.interface.models.sweep import SweepBase
59
+
60
+ logger = logging.getLogger(__name__)
61
+
62
+ T = TypeVar("T")
63
+
64
+
65
class IqmServerClient(StationControlClient, metaclass=IqmServerClientMeta):
    """StationControlClient implementation that talks to IQM Server over gRPC.

    Sweeps are submitted as IQM server jobs. Only the methods defined here are
    supported; the metaclass replaces every other StationControlClient method
    with a stub that raises NotImplementedError.
    """

    def __init__(
        self,
        root_url: str,
        get_token_callback: Callable[[], str] | None = None,
        grpc_channel: grpc.Channel | None = None,
    ):
        """Open a channel to the server and resolve the target quantum computer.

        Args:
            root_url: Quantum computer URL, e.g. "https://cocos.<server>/<qc_name>[:timeslot]".
            get_token_callback: Optional callback returning a fresh API token,
                attached as a bearer token to every request.
            grpc_channel: Pre-built channel to use instead of creating one from
                ``root_url`` (useful for testing).
        """
        self.root_url = root_url
        self._connection_params = parse_connection_params(root_url)
        # Cache for QC resources fetched from the server, keyed by resource name.
        self._cached_resources = {}
        # Most recently submitted sweep definition; see sweep() and _get_cached_sweep().
        self._latest_submitted_sweep = None
        self._channel = grpc_channel or create_channel(self._connection_params, get_token_callback)
        self._current_qc = resolve_current_qc(self._channel, self._connection_params.quantum_computer)

    def __del__(self):
        # Best-effort channel cleanup; never raise from a finalizer.
        try:
            self._channel.close()
        except Exception:
            pass

    def get_about(self) -> dict:
        """Return the server's "about" resource as a dict."""
        return self._get_resource("about", parse_json)

    def get_configuration(self) -> dict:
        """Return the server's "configuration" resource as a dict."""
        return self._get_resource("configuration", parse_json)

    def get_settings(self) -> SettingNode:
        """Return the station settings tree (a fresh copy of the cached node)."""
        return self._get_resource("settings", deserialize_setting_node).copy()

    def get_chip_design_record(self, dut_label: str) -> dict:
        """Return the chip design record for the given DUT label."""
        return self._get_resource(f"chip-design-records/{dut_label}", parse_json)

    def get_channel_properties(self) -> dict[str, ChannelProperties]:
        """Return the channel properties, keyed by channel name."""
        return self._get_resource("channel-properties", unpack_channel_properties)

    def get_duts(self) -> list[DutData]:
        """Return the devices under test known to the station."""
        return self._get_resource("duts", lambda data: DutList.model_validate(parse_json(data)))

    def get_dut_fields(self, dut_label: str) -> list[DutFieldData]:
        """Return the DUT fields for the given DUT label."""
        return self._get_resource(
            f"dut-fields/{dut_label}", lambda data: DutFieldDataList.model_validate(parse_json(data))
        )

    def sweep(self, sweep_definition: SweepDefinition) -> dict:
        """Submit a sweep for execution as an IQM server PULSE job.

        Returns:
            Dict with "sweep_id" and "task_id" keys, both holding the job id as a string.
        """
        with wrap_error("Job submission failed"):
            jobs = proto.JobsStub(self._channel)
            job: proto.JobV1 = jobs.SubmitJobV1(
                proto.SubmitJobRequestV1(
                    qc_id=self._current_qc.id,
                    type=proto.JobType.PULSE,
                    payload=serialize_sweep_task_request(sweep_definition, queue_name="sweeps"),
                    use_timeslot=self._connection_params.use_timeslot,
                )
            )
            # Optimization: we know that in most of the cases the submitted sweep is queried
            # right after submitting it so we can cache reference to the submitted sweep here
            # to avoid extra request to the server
            job_id = from_proto_uuid(job.id)
            self._latest_submitted_sweep = dataclasses.replace(sweep_definition, sweep_id=job_id)
            return {
                "sweep_id": str(job_id),
                "task_id": str(job_id),
            }

    def get_sweep(self, sweep_id: uuid.UUID) -> SweepData:
        """Fetch sweep (job) data for the given id."""
        with wrap_error("Job loading failed"):
            jobs = proto.JobsStub(self._channel)
            job_lookup = proto.JobLookupV1(id=to_proto_uuid(sweep_id))
            job: proto.JobV1 = jobs.GetJobV1(job_lookup)
            # IQM server job does not include any details about the sweep properties so we need to
            # construct the resulting sweep data using the payload (= input sweep) and metadata
            # from the IQM server job
            sweep = self._get_cached_sweep(sweep_id) or payload_to_sweep(load_all(jobs.GetJobPayloadV1(job_lookup)))
            return SweepData(
                created_timestamp=to_datetime(job.created_at),
                modified_timestamp=to_datetime(job.updated_at),
                begin_timestamp=to_datetime(job.execution_started_at) if job.HasField("execution_started_at") else None,
                end_timestamp=to_datetime(job.execution_ended_at) if job.HasField("execution_ended_at") else None,
                sweep_status=to_sweep_status(job.status),
                # Sweep definition is a subclass of SweepBase so we can just copy all SweepBase fields
                # from the input sweep to the sweep data
                **{f.name: getattr(sweep, f.name) for f in dataclasses.fields(SweepBase)},
            )

    def get_sweep_results(self, sweep_id: uuid.UUID) -> SweepResults:
        """Download and deserialize the results of the given sweep (job)."""
        with wrap_error("Job result loading failed"):
            jobs = proto.JobsStub(self._channel)
            data_chunks = jobs.GetJobResultsV1(proto.JobLookupV1(id=to_proto_uuid(sweep_id)))
            return deserialize_sweep_results(load_all(data_chunks))

    def revoke_sweep(self, sweep_id: uuid.UUID) -> None:
        """Cancel the job corresponding to the given sweep id."""
        with wrap_error("Job cancellation failed"):
            jobs = proto.JobsStub(self._channel)
            jobs.CancelJobV1(proto.JobLookupV1(id=to_proto_uuid(sweep_id)))

    def get_task(self, task_id: uuid.UUID) -> dict:
        """Return task-style status information for the given job id."""
        with wrap_error("Job loading failed"):
            jobs = proto.JobsStub(self._channel)
            job: proto.JobV1 = jobs.GetJobV1(proto.JobLookupV1(id=to_proto_uuid(task_id)))
            return {
                # It would be nice to have these typed somewhere...
                "task_id": str(from_proto_uuid(job.id)),
                "task_status": to_task_status(job.status),
                "task_result": {"message": ""},
                "task_error": job.error if job.HasField("error") else "",
                "position": job.queue_position if job.HasField("queue_position") else None,
                "is_position_capped": False,
            }

    def _wait_task_completion(
        self,
        task_id: str,
        update_progress_callback: Callable[[Statuses], None] | None,
    ) -> bool:
        """Block until the job reaches a final status, reporting queue progress.

        Returns:
            True if the wait was interrupted by the user (KeyboardInterrupt), False otherwise.
        """
        with wrap_error("Job subscription failed"):
            try:
                notify = update_progress_callback or (lambda _: None)
                job_id = uuid.UUID(task_id)
                initial_queue_position = None
                status = None
                # SubscribeToJobV1 runs until job reaches its final status (completed, failed, interrupted)
                job_events = subscribe_to_job_events(self._channel, job_id)
                for job in job_events:
                    status = job.status
                    if status == proto.JobStatus.IN_QUEUE:
                        if initial_queue_position is None:
                            initial_queue_position = job.queue_position
                        queue_progress = initial_queue_position - job.queue_position
                        notify([("Progress in queue", queue_progress, initial_queue_position)])
                # In case of success, mark progress bar to 100% (looks nicer)
                if initial_queue_position is not None and status == proto.JobStatus.COMPLETED:
                    notify([("Progress in queue", initial_queue_position, initial_queue_position)])
                return False
            except KeyboardInterrupt:
                return True

    def get_calibration_set_values(self, calibration_set_id: uuid.UUID) -> dict[str, ObservationValue]:
        """Download a calibration set and return its observation values by name."""
        with wrap_error("Calibration set loading failed"):
            calibrations = proto.CalibrationsStub(self._channel)
            data_chunks = calibrations.GetFullCalibrationDataV1(
                proto.CalibrationLookupV1(
                    id=to_proto_uuid(calibration_set_id),
                )
            )
            _, cal_set_values = parse_calibration_set(load_all(data_chunks))
            return cal_set_values

    def get_latest_calibration_set_id(self, dut_label: str) -> uuid.UUID:
        """Return the id of the latest calibration set for the given DUT label.

        Raises:
            ValueError: if the latest calibration belongs to a different DUT label.
        """
        with wrap_error("Calibration set metadata loading failed"):
            calibrations = proto.CalibrationsStub(self._channel)
            metadata: proto.CalibrationMetadataV1 = calibrations.GetLatestQuantumComputerCalibrationV1(
                proto.LatestQuantumComputerCalibrationLookupV1(
                    qc_id=self._current_qc.id,
                )
            )
            if metadata.dut_label != dut_label:
                raise ValueError(f"No calibration set for dut_label = {dut_label}")
            return from_proto_uuid(metadata.id)

    def _get_cached_sweep(self, sweep_id: uuid.UUID) -> SweepDefinition | None:
        """Return the locally cached sweep definition when it matches ``sweep_id``, else None."""
        latest_submitted = self._latest_submitted_sweep
        if latest_submitted and latest_submitted.sweep_id == sweep_id:
            return latest_submitted
        return None

    def _get_resource(self, resource_name: str, deserialize: Callable[[bytes], T]) -> T:
        """Fetch a QC resource from the server, deserializing and caching the result."""
        with wrap_error(f"Failed to load QC resource '{resource_name}'"):
            if (cached := self._cached_resources.get(resource_name)) is not None:
                return cached
            qcs = proto.QuantumComputersStub(self._channel)
            data_chunks = qcs.GetQuantumComputerResourceV1(
                proto.QuantumComputerResourceLookupV1(
                    qc_id=self._current_qc.id,
                    resource_name=resource_name,
                )
            )
            resource = deserialize(load_all(data_chunks))
            self._cached_resources[resource_name] = resource
            return resource
+
245
+
246
def resolve_current_qc(channel: grpc.Channel, alias: str) -> proto.QuantumComputerV1:
    """Look up the quantum computer with the given alias on the server.

    Raises:
        ValueError: if no quantum computer with that alias exists.
    """
    qcs = proto.QuantumComputersStub(channel)
    qc_list: proto.QuantumComputersListV1 = qcs.ListQuantumComputersV1(proto.ListQuantumComputerFiltersV1())
    found = next((qc for qc in qc_list.items if qc.alias == alias), None)
    if found is None:
        raise ValueError(f"Quantum computer '{alias}' does not exist")
    return found
253
+
254
+
255
def subscribe_to_job_events(channel: grpc.Channel, job_id: uuid.UUID) -> Iterable[proto.JobV1]:
    """Yield job state updates for ``job_id`` until the subscription stream ends.

    Only events carrying an "update" field yield job state; other event
    variants are skipped. If the server cancels the subscription (metadata
    error code "server_cancel", e.g. on restarts), the subscription is
    retried up to 10 times with a 5 second pause; other gRPC errors are
    re-raised.
    """
    jobs = proto.JobsStub(channel)
    attempts = 1
    while True:
        try:
            events = jobs.SubscribeToJobV1(proto.JobLookupV1(id=to_proto_uuid(job_id)))
            for event in events:
                job_event = cast(proto.JobEventV1, event)
                if job_event.HasField("update"):
                    yield job_event.update
            return
        except grpc.RpcError as e:
            # Server may cancel subscription due to e.g. restarts, in which case we can just retry after some waiting
            error = extract_error(e)
            if error.error_code == "server_cancel" and attempts <= 10:
                attempts += 1
                sleep(5)
                continue
            raise e
+
275
+
276
def parse_calibration_set(cal_set_data: bytes) -> tuple[uuid.UUID, dict[str, ObservationValue]]:
    """Parse a cocos-format calibration set JSON blob.

    Args:
        cal_set_data: raw JSON bytes of the calibration set.

    Returns:
        Tuple of (calibration set id, mapping from observation name to validated value).
    """
    # IQM server calibration sets are in cocos calibration set JSON format, we can get
    # both id and observations from it
    cal_set = parse_json(cal_set_data)
    # Fix: the id arrives as a JSON string; convert it so the declared
    # uuid.UUID return type actually holds.
    cal_set_id = uuid.UUID(str(cal_set["calibration_set_id"]))
    observations = cal_set.get("observations", {})
    cal_set_values = {k: validate_value(v["value"]) for k, v in observations.items()}
    return cal_set_id, cal_set_values
284
+
285
+
286
def payload_to_sweep(job_payload: bytes) -> SweepDefinition:
    """Reconstruct the submitted sweep definition from a job's raw payload bytes."""
    sweep_definition, _task_metadata = deserialize_sweep_task_request(job_payload)
    return sweep_definition
289
+
290
+
291
def to_sweep_status(job_status: proto.JobStatus) -> SweepStatus:
    """Map an IQM server job status onto the corresponding sweep status.

    Raises:
        ValueError: for job statuses without a sweep status counterpart.
    """
    status_map = {
        proto.JobStatus.IN_QUEUE: SweepStatus.PENDING,
        proto.JobStatus.EXECUTING: SweepStatus.PROGRESS,
        proto.JobStatus.FAILED: SweepStatus.FAILURE,
        proto.JobStatus.COMPLETED: SweepStatus.SUCCESS,
        proto.JobStatus.INTERRUPTED: SweepStatus.INTERRUPTED,
        proto.JobStatus.CANCELLED: SweepStatus.REVOKED,
    }
    sweep_status = status_map.get(job_status)
    if sweep_status is None:
        raise ValueError(f"Unknown job status: '{job_status}'")
    return sweep_status
306
+
307
+
308
def to_task_status(job_status: proto.JobStatus) -> str:
    """Map an IQM server job status onto a task status string.

    Raises:
        ValueError: for job statuses without a task status counterpart.
    """
    status_map = {
        proto.JobStatus.IN_QUEUE: "PENDING",
        proto.JobStatus.EXECUTING: "STARTED",
        # All abnormal terminations collapse into a single failure status.
        proto.JobStatus.FAILED: "FAILURE",
        proto.JobStatus.INTERRUPTED: "FAILURE",
        proto.JobStatus.CANCELLED: "FAILURE",
        proto.JobStatus.COMPLETED: "SUCCESS",
    }
    task_status = status_map.get(job_status)
    if task_status is None:
        raise ValueError(f"Unknown job status: '{job_status}'")
    return task_status
319
+
320
+
321
def parse_json(data: bytes) -> Any:
    """Deserialize a JSON document given as raw bytes.

    ``json.loads`` accepts bytes directly (auto-detecting UTF-8/16/32), so
    wrapping the data in a BytesIO stream for ``json.load`` is unnecessary.
    """
    return json.loads(data)
323
+
324
+
325
@contextmanager
def wrap_error(title: str):
    """Context manager converting any raised error into an :class:`IqmServerError`.

    gRPC errors are decoded with :func:`extract_error`; anything else is
    wrapped as an INTERNAL error with ``title`` prefixed to the message.
    """
    try:
        yield
    except grpc.RpcError as rpc_error:
        raise extract_error(rpc_error, title) from rpc_error
    except Exception as unexpected:
        wrapped = IqmServerError(
            message=f"{title}: {unexpected}",
            status_code=str(grpc.StatusCode.INTERNAL.name),
        )
        raise wrapped from unexpected
@@ -0,0 +1,38 @@
1
+ # Copyright 2025 IQM
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ import inspect
15
+
16
+
17
+ class IqmServerClientMeta(type):
18
+ """Custom metaclass that automatically adds '<feature> not implemented'
19
+ stub implementations for all StationControlClient functions that are
20
+ not overridden by IqmServerClient.
21
+ """
22
+
23
+ def __new__(cls, name, bases, dct):
24
+ for f_name, _ in inspect.getmembers(
25
+ bases[0], predicate=lambda m: inspect.isfunction(m) and not m.__name__.startswith("__")
26
+ ):
27
+ if f_name not in dct:
28
+ dct[f_name] = _not_implemented_stub(f_name)
29
+ return super().__new__(cls, name, bases, dct)
30
+
31
+
32
+ def _not_implemented_stub(feature: str):
33
+ """Generate a function that raises NotImplementedError."""
34
+
35
+ def stub(*args, **kwargs):
36
+ raise NotImplementedError(f"'{feature}' is not implemented for this backend")
37
+
38
+ return stub
@@ -0,0 +1,43 @@
1
+ # Copyright 2025 IQM
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ from .calibration_pb2 import (
15
+ CalibrationLookupV1,
16
+ CalibrationMetadataV1,
17
+ LatestQuantumComputerCalibrationLookupV1,
18
+ )
19
+ from .calibration_pb2_grpc import CalibrationsStub, CalibrationsServicer, Calibrations
20
+ from .common_pb2 import Empty, DataChunk, Keepalive
21
+ from .job_pb2 import (
22
+ JobType,
23
+ JobStatus,
24
+ JobInputSummaryV1,
25
+ JobV1,
26
+ JobLookupV1,
27
+ SubmitJobRequestV1,
28
+ JobEventV1,
29
+ )
30
+ from .job_pb2_grpc import JobsStub, JobsServicer, Jobs
31
+ from .qc_pb2 import (
32
+ QuantumComputerLookupV1,
33
+ QuantumComputerResourceLookupV1,
34
+ ListQuantumComputerFiltersV1,
35
+ QuantumComputerV1,
36
+ QuantumComputersListV1,
37
+ )
38
+ from .qc_pb2_grpc import (
39
+ QuantumComputersStub,
40
+ QuantumComputersServicer,
41
+ QuantumComputers,
42
+ )
43
+ from .uuid_pb2 import Uuid
@@ -0,0 +1,48 @@
1
+ # Copyright 2025 IQM
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # -*- coding: utf-8 -*-
15
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
16
+ # source: calibration.proto
17
+ # Protobuf Python Version: 4.25.1
18
+ """Generated protocol buffer code."""
19
+ from google.protobuf import descriptor as _descriptor
20
+ from google.protobuf import descriptor_pool as _descriptor_pool
21
+ from google.protobuf import symbol_database as _symbol_database
22
+ from google.protobuf.internal import builder as _builder
23
+ # @@protoc_insertion_point(imports)
24
+
25
+ _sym_db = _symbol_database.Default()
26
+
27
+
28
+ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
29
+ from . import common_pb2 as common__pb2
30
+ from . import uuid_pb2 as uuid__pb2
31
+
32
+
33
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x63\x61libration.proto\x12\niqm.server\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x0c\x63ommon.proto\x1a\nuuid.proto\"K\n(LatestQuantumComputerCalibrationLookupV1\x12\x1f\n\x05qc_id\x18\x01 \x02(\x0b\x32\x10.iqm.server.Uuid\"3\n\x13\x43\x61librationLookupV1\x12\x1c\n\x02id\x18\x01 \x02(\x0b\x32\x10.iqm.server.Uuid\"\x8a\x01\n\x15\x43\x61librationMetadataV1\x12\x1c\n\x02id\x18\x01 \x02(\x0b\x32\x10.iqm.server.Uuid\x12.\n\ncreated_at\x18\x02 \x02(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\tdut_label\x18\x03 \x02(\t\x12\x10\n\x08is_valid\x18\x04 \x02(\x08\x32\xbf\x02\n\x0c\x43\x61librations\x12\x80\x01\n%GetLatestQuantumComputerCalibrationV1\x12\x34.iqm.server.LatestQuantumComputerCalibrationLookupV1\x1a!.iqm.server.CalibrationMetadataV1\x12V\n\x10GetCalibrationV1\x12\x1f.iqm.server.CalibrationLookupV1\x1a!.iqm.server.CalibrationMetadataV1\x12T\n\x18GetFullCalibrationDataV1\x12\x1f.iqm.server.CalibrationLookupV1\x1a\x15.iqm.server.DataChunk0\x01')
34
+
35
+ _globals = globals()
36
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
37
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'calibration_pb2', _globals)
38
+ if _descriptor._USE_C_DESCRIPTORS == False:
39
+ DESCRIPTOR._options = None
40
+ _globals['_LATESTQUANTUMCOMPUTERCALIBRATIONLOOKUPV1']._serialized_start=92
41
+ _globals['_LATESTQUANTUMCOMPUTERCALIBRATIONLOOKUPV1']._serialized_end=167
42
+ _globals['_CALIBRATIONLOOKUPV1']._serialized_start=169
43
+ _globals['_CALIBRATIONLOOKUPV1']._serialized_end=220
44
+ _globals['_CALIBRATIONMETADATAV1']._serialized_start=223
45
+ _globals['_CALIBRATIONMETADATAV1']._serialized_end=361
46
+ _globals['_CALIBRATIONS']._serialized_start=364
47
+ _globals['_CALIBRATIONS']._serialized_end=683
48
+ # @@protoc_insertion_point(module_scope)