iqm-client 32.1.1__py3-none-any.whl → 33.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. iqm/cirq_iqm/devices/iqm_device_metadata.py +2 -1
  2. iqm/cirq_iqm/examples/demo_common.py +1 -1
  3. iqm/cirq_iqm/examples/demo_iqm_execution.py +3 -3
  4. iqm/cirq_iqm/iqm_sampler.py +47 -29
  5. iqm/cirq_iqm/serialize.py +1 -1
  6. iqm/cirq_iqm/transpiler.py +3 -1
  7. iqm/iqm_client/__init__.py +0 -2
  8. iqm/iqm_client/errors.py +6 -17
  9. iqm/iqm_client/iqm_client.py +199 -602
  10. iqm/iqm_client/models.py +20 -611
  11. iqm/iqm_client/transpile.py +11 -8
  12. iqm/iqm_client/validation.py +18 -9
  13. iqm/iqm_server_client/__init__.py +14 -0
  14. iqm/iqm_server_client/errors.py +6 -0
  15. iqm/iqm_server_client/iqm_server_client.py +755 -0
  16. iqm/iqm_server_client/models.py +179 -0
  17. iqm/iqm_server_client/py.typed +0 -0
  18. iqm/qiskit_iqm/__init__.py +8 -0
  19. iqm/qiskit_iqm/examples/bell_measure.py +5 -5
  20. iqm/qiskit_iqm/examples/transpile_example.py +13 -6
  21. iqm/qiskit_iqm/fake_backends/fake_adonis.py +2 -1
  22. iqm/qiskit_iqm/fake_backends/fake_aphrodite.py +2 -1
  23. iqm/qiskit_iqm/fake_backends/fake_apollo.py +2 -1
  24. iqm/qiskit_iqm/fake_backends/fake_deneb.py +2 -1
  25. iqm/qiskit_iqm/fake_backends/iqm_fake_backend.py +8 -7
  26. iqm/qiskit_iqm/iqm_backend.py +3 -4
  27. iqm/qiskit_iqm/iqm_circuit_validation.py +8 -7
  28. iqm/qiskit_iqm/iqm_job.py +106 -88
  29. iqm/qiskit_iqm/iqm_move_layout.py +2 -1
  30. iqm/qiskit_iqm/iqm_naive_move_pass.py +114 -55
  31. iqm/qiskit_iqm/iqm_provider.py +49 -36
  32. iqm/qiskit_iqm/iqm_target.py +4 -6
  33. iqm/qiskit_iqm/qiskit_to_iqm.py +62 -25
  34. {iqm_client-32.1.1.dist-info → iqm_client-33.0.1.dist-info}/METADATA +17 -24
  35. iqm_client-33.0.1.dist-info/RECORD +63 -0
  36. iqm/iqm_client/api.py +0 -90
  37. iqm/iqm_client/authentication.py +0 -206
  38. iqm_client-32.1.1.dist-info/RECORD +0 -60
  39. {iqm_client-32.1.1.dist-info → iqm_client-33.0.1.dist-info}/AUTHORS.rst +0 -0
  40. {iqm_client-32.1.1.dist-info → iqm_client-33.0.1.dist-info}/LICENSE.txt +0 -0
  41. {iqm_client-32.1.1.dist-info → iqm_client-33.0.1.dist-info}/WHEEL +0 -0
  42. {iqm_client-32.1.1.dist-info → iqm_client-33.0.1.dist-info}/entry_points.txt +0 -0
  43. {iqm_client-32.1.1.dist-info → iqm_client-33.0.1.dist-info}/top_level.txt +0 -0
--- /dev/null
+++ iqm/iqm_server_client/iqm_server_client.py
@@ -0,0 +1,755 @@
+# Copyright 2025 IQM
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Client implementation for IQM Server REST API."""
+
+from __future__ import annotations
+
+import abc
+from collections.abc import Callable
+from dataclasses import dataclass
+from datetime import datetime
+from functools import cache
+from importlib.metadata import version
+import json
+import logging
+import os
+import platform
+from time import sleep
+from typing import Any, Literal, TypeAlias, TypeVar
+from urllib.parse import urlparse
+from uuid import UUID
+import warnings
+
+from iqm.iqm_server_client.errors import ClientConfigurationError
+from iqm.iqm_server_client.models import (
+    CalibrationSet,
+    JobData,
+    JobStatus,
+    ListQuantumComputersResponse,
+    QualityMetricSet,
+    Source,
+    TimelineEntry,
+)
+from iqm.models.channel_properties import ChannelProperties
+from opentelemetry import propagate, trace
+from pydantic import BaseModel, TypeAdapter
+import requests
+
+from exa.common.data.setting_node import SettingNode
+from exa.common.errors.station_control_errors import map_from_status_code_to_error
+from iqm.station_control.client.authentication import TokenManager
+from iqm.station_control.client.list_models import (
+    DutList,
+    ListModel,
+    StaticQuantumArchitectureList,
+)
+from iqm.station_control.client.serializers import (
+    deserialize_sweep_results,
+    serialize_sweep_job_request,
+)
+from iqm.station_control.client.serializers.channel_property_serializer import unpack_channel_properties
+from iqm.station_control.client.serializers.setting_node_serializer import deserialize_setting_node
+from iqm.station_control.client.utils import get_progress_bar_callback
+from iqm.station_control.interface.models import (
+    CircuitMeasurementCountsBatch,
+    CircuitMeasurementResultsBatch,
+    DutData,
+    DynamicQuantumArchitecture,
+    ProgressCallback,
+    RunRequest,
+    StaticQuantumArchitecture,
+    StrUUID,
+    SweepDefinition,
+    SweepResults,
+)
+from iqm.station_control.interface.pydantic_base import PydanticBase
+
+logger = logging.getLogger(__name__)
+
+TypePydanticBase = TypeVar("TypePydanticBase", bound=PydanticBase)
+CircuitMeasurementResultsBatchAdapter = TypeAdapter(CircuitMeasurementResultsBatch)
+CircuitCountsBatchAdapter = TypeAdapter(CircuitMeasurementCountsBatch)
+
+StrUUIDOrDefault: TypeAlias = str | UUID | Literal["default"]
+
+REQUESTS_TIMEOUT = float(os.environ.get("IQM_CLIENT_REQUESTS_TIMEOUT", 120))
+
+_POLLING_INTERVAL: float = float(os.environ.get("IQM_CLIENT_SECONDS_BETWEEN_CALLS", 1.0))
+"""IQM Server polling interval (in seconds)."""
+
+DEFAULT_TIMEOUT_SECONDS: float = 900.0
+"""Default timeout for waiting for a job to finish."""
+
+
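Both module-level defaults above (a 120 s per-request timeout and a 1 s polling interval) are read from the environment when the module is imported, so they can be tuned without code changes. A minimal sketch, not part of the diffed file, with arbitrary example values:

```python
import os

# Hypothetical override of the defaults above (120 s request timeout, 1 s polling interval).
# These must be set before importing iqm.iqm_server_client.iqm_server_client,
# because the module reads the environment variables at import time.
os.environ["IQM_CLIENT_REQUESTS_TIMEOUT"] = "60"
os.environ["IQM_CLIENT_SECONDS_BETWEEN_CALLS"] = "2"
```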
+# INTERNAL: _IQMServerClient is an unstable, private API.
+# It may change without notice. Do not rely on it outside this package.
+class _IQMServerClient:
+    """Client implementation for IQM Server REST API.
+
+    Args:
+        iqm_server_url: Remote IQM Server URL to connect to.
+        quantum_computer: ID or alias of the quantum computer to connect to, if the IQM Server
+            instance controls more than one.
+        token: Long-lived IQM token in plain text format.
+        tokens_file: Path to a tokens file used for authentication.
+        client_signature: String that is added to the User-Agent header of requests
+            sent to the server.
+        enable_opentelemetry: Iff True, enable Jaeger/OpenTelemetry tracing.
+        timeout: Timeout for the request in seconds.
+
+    """
+
+    @staticmethod
+    def normalize_url(iqm_server_url: str, quantum_computer: str | None) -> tuple[str, str | None, bool]:
+        """Validate the connection details, provide some backwards compatibility."""
+        # Security measure: mitigate UTF-8 read order control character
+        # exploits by allowing only ASCII urls
+        if not iqm_server_url.isascii():
+            raise ClientConfigurationError(f"Non-ASCII characters in URL: {iqm_server_url}")
+        try:
+            url = urlparse(iqm_server_url)
+        except Exception as e:
+            raise ClientConfigurationError(f"Invalid URL: {iqm_server_url}") from e
+
+        if url.scheme not in {"http", "https"}:
+            raise ClientConfigurationError(
+                f"The URL schema has to be http or https. Incorrect schema in URL: {iqm_server_url}"
+            )
+
+        hostname = url.hostname or ""
+        path_segments = (url.path or "").split("/")
+
+        # Compatibility: to maintain the API compatibility, Resonance URLs included the quantum computer
+        # alias as a path prefix and served the Cocos/SC compatible API per QC under that prefix.
+        # Now that this new IQM client implementation does not have such compatibility requirements,
+        # the preferred way is to explicitly set the connected quantum computer alias as an initialization
+        # parameter, e.g. IQMClient(root_url, quantum_computer="garnet"), and also target jobs for a
+        # timeslot explicitly when submitting the job, e.g. client.submit_circuits(..., use_timeslot=True).
+        # However, we want to maintain some sort of compatibility by allowing such URLs for now and
+        # warn the user about the "old-style" usage. Once this version of the client is released, the Resonance
+        # documentation can be updated to instruct the "new-style" client initialization, and eventually
+        # we can also drop this compatibility code.
+        quantum_computer_from_path = path_segments[-1].removesuffix(":timeslot")
+        if quantum_computer is not None and quantum_computer_from_path:
+            raise ClientConfigurationError(
+                "The IQM Server URL must not contain quantum computer name when initializing client with "
+                + "explicit quantum computer name. To fix this error, use server base url."
+            )
+        if quantum_computer_from_path:
+            quantum_computer = quantum_computer_from_path
+            warnings.warn(
+                "The given IQM Server URL is in a deprecated format, see the client initialization instructions "
+                + "and correct URL format from the server web dashboard."
+            )
+
+        # Same for timeslots: the timeslot / FIFO queue selection had to be embedded into URL, whereas in this
+        # new implementation, the explicit timeslot usage is preferred upon the actual job submission. Fixing
+        # compatibility but giving a warning about the deprecated usage.
+        use_timeslot_default = path_segments[-1].endswith(":timeslot")
+        if use_timeslot_default:
+            warnings.warn(
+                "Quantum computer timeslot URL is deprecated. Jobs can be submitted to timeslots by using "
+                + "`use_timeslot=True` parameter per job. See the server web dashboard or "
+                + "https://docs.meetiqm.com/iqm-client/ for more detailed instructions."
+            )
+
+        # Same here; the "cocos" subdomain was used to handle the backwards compatibility, so we can just drop
+        # it now and give a warning. This is only a Resonance-specific compatibility change, so we can be precise
+        # in the warning message.
+        if hostname.startswith("cocos."):
+            hostname = hostname.removeprefix("cocos.")
+            warnings.warn(
+                "Resonance CoCoS API is deprecated. Use https://resonance.meetiqm.com. See the Resonance "
+                + "documentation or https://docs.meetiqm.com/iqm-client/ for more detailed instructions."
+            )
+
+        # Use hostname without "cocos" subdomain and quantum computer name
+        port_suffix = f":{url.port}" if url.port else ""
+        netloc = "/".join([hostname] + path_segments[:-1]).rstrip("/")
+        base_url = f"{url.scheme}://{netloc}{port_suffix}"
+
+        return base_url, quantum_computer, use_timeslot_default
+
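To illustrate the compatibility handling above, a rough sketch of what the normalization is expected to do for an old-style Resonance URL. This is not taken from the package; the alias "garnet" and the URL are only examples, following the code and comments above:

```python
# Old-style Resonance URL: "cocos." subdomain plus a quantum computer alias and
# ":timeslot" suffix in the path. Per the logic above, this emits deprecation
# warnings and splits the URL into a base URL, the alias, and the timeslot flag.
base_url, qc, use_timeslot = _IQMServerClient.normalize_url(
    "https://cocos.resonance.meetiqm.com/garnet:timeslot", quantum_computer=None
)
# Expected per the code above:
#   base_url == "https://resonance.meetiqm.com"
#   qc == "garnet"
#   use_timeslot is True
```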
+    def __init__(
+        self,
+        iqm_server_url: str,
+        *,
+        quantum_computer: str | None = None,
+        token: str | None = None,
+        tokens_file: str | None = None,
+        client_signature: str | None = None,
+        enable_opentelemetry: bool = False,
+        timeout: float = REQUESTS_TIMEOUT,
+    ):
+        root_url, quantum_computer, use_timeslot_default = _IQMServerClient.normalize_url(
+            iqm_server_url, quantum_computer
+        )
+        self.root_url = root_url
+        # authentication
+        tm = TokenManager(token, tokens_file)
+        self._token_manager = tm
+        self._auth_header_callback = tm.get_auth_header_callback()
+
+        self._signature = self._create_signature(client_signature)
+
+        self._enable_opentelemetry = enable_opentelemetry
+        self._timeout = timeout
+        self._quantum_computer = self._resolve_quantum_computer(quantum_computer)
+        self._use_timeslot = use_timeslot_default
+
+    @property
+    def api_version(self) -> str:
+        """API version of the IQM Server API this client is using."""
+        return "v1"
+
+    @property
+    def quantum_computer(self) -> str:
+        """Human-readable alias of the quantum computer this client connects to."""
+        return self._quantum_computer
+
+    def get_health(self) -> dict[str, Any]:
+        """Get the status of the IQM Server."""
+        response = self._send_request(requests.get, f"quantum-computers/{self._quantum_computer}/health")
+        return response.json()
+
+    @cache
+    def get_about(self) -> dict[str, Any]:
+        """Get information about the IQM Server."""
+        response = self._send_request(requests.get, f"quantum-computers/{self._quantum_computer}/artifacts/about")
+        return response.json()
+
+    def get_settings(self) -> SettingNode:
+        """Tree representation of the default settings of the quantum computer,
+        as defined in the configuration files.
+        """
+        return self._get_cached_settings().model_copy()
+
+    @cache
+    def _get_cached_settings(self) -> SettingNode:
+        headers = {"Accept": "application/protobuf"}
+        response = self._send_request(
+            requests.get, f"quantum-computers/{self._quantum_computer}/artifacts/settings", headers=headers
+        )
+        return deserialize_setting_node(response.content)
+
+    @cache
+    def get_chip_design_records(self) -> list[dict[str, Any]]:
+        """Get the chip design records of the quantum computer."""
+        response = self._send_request(
+            requests.get, f"quantum-computers/{self._quantum_computer}/artifacts/chip-design-records"
+        )
+        return response.json()
+
+    @cache
+    def get_channel_properties(self) -> dict[str, ChannelProperties]:
+        """Get the channel properties from the quantum computer.
+
+        Channel properties contain information about the hardware limitations, e.g. the sample rate,
+        granularity and supported instructions for the various control channels.
+
+        Returns:
+            Mapping from channel name to AWGProperties or ReadoutProperties.
+
+        """
+        headers = {"Accept": "application/protobuf"}
+        response = self._send_request(
+            requests.get, f"quantum-computers/{self._quantum_computer}/artifacts/channel-properties", headers=headers
+        )
+        decoded_dict = unpack_channel_properties(response.content)
+        return decoded_dict
+
+    def get_duts(self) -> list[DutData]:
+        """Get the DUT(s) of the quantum computer."""
+        response = self._send_request(requests.get, f"quantum-computers/{self._quantum_computer}/artifacts/duts")
+        return self._deserialize_response(response, DutList)
+
+    def get_static_quantum_architectures(self) -> list[StaticQuantumArchitecture]:
+        """Get the static quantum architecture(s) of the quantum computer."""
+        response = self._send_request(
+            requests.get, f"quantum-computers/{self._quantum_computer}/artifacts/static-quantum-architectures"
+        )
+        return self._deserialize_response(response, StaticQuantumArchitectureList)
+
+    def get_calibration_set(self, calibration_set_id: StrUUIDOrDefault) -> CalibrationSet:
+        """Get a calibration set from the database."""
+        response = self._send_request(requests.get, f"calibration-sets/{self._quantum_computer}/{calibration_set_id}")
+        return self._deserialize_response(response, CalibrationSet)
+
+    def get_dynamic_quantum_architecture(self, calibration_set_id: StrUUIDOrDefault) -> DynamicQuantumArchitecture:
+        """Get the dynamic quantum architecture for the given calibration set ID.
+
+        Returns:
+            Dynamic quantum architecture of the quantum computer for the given calibration set ID.
+
+        """
+        response = self._send_request(
+            requests.get, f"calibration-sets/{self._quantum_computer}/{calibration_set_id}/dynamic-quantum-architecture"
+        )
+        return self._deserialize_response(response, DynamicQuantumArchitecture)
+
+    def get_calibration_set_quality_metric_set(self, calibration_set_id: StrUUIDOrDefault) -> QualityMetricSet:
+        """Get the latest quality metric set for the given calibration set ID."""
+        response = self._send_request(
+            requests.get,
+            f"calibration-sets/{self._quantum_computer}/{calibration_set_id}/metrics",
+        )
+        return self._deserialize_response(response, QualityMetricSet)
+
+    def submit_sweep(
+        self,
+        sweep_definition: SweepDefinition,
+        use_timeslot: bool = False,
+    ) -> JobData:
+        """Submit an N-dimensional sweep job for execution.
+
+        Args:
+            sweep_definition: The content of the sweep to be created.
+            use_timeslot: If ``True`` submit the job to the timeslot queue, otherwise
+                submit it to the shared FIFO queue.
+
+        Returns:
+            Upon successful submission: sweep job data, including the job ID that can be used to track it.
+
+        """
+        data = serialize_sweep_job_request(sweep_definition, queue_name="sweeps")
+        return self._submit_job(job_type="sweep", protobuf_data=data, use_timeslot=use_timeslot)
+
+    def submit_circuits(self, run_request: RunRequest, use_timeslot: bool = False) -> JobData:
+        """Submit a batch of quantum circuits for execution.
+
+        Args:
+            run_request: Circuit execution request.
+            use_timeslot: If ``True`` submit the job to the timeslot queue, otherwise
+                submit it to the shared FIFO queue.
+
+        Returns:
+            Upon successful submission: circuit job data, including the job ID that can be used to track it.
+
+        """
+        data = self._serialize_model(run_request)
+        return self._submit_job(job_type="circuit", json_data=data, use_timeslot=use_timeslot)
+
+    def get_job(self, job_id: StrUUID) -> JobData:
+        """Get the current status and metadata of the job."""
+        response = self._send_request(requests.get, f"jobs/{job_id}")
+        return self._deserialize_response(response, JobData)
+
+    def cancel_job(self, job_id: StrUUID) -> None:
+        """Cancel a job.
+
+        If execution is in progress, it will be interrupted after the current sweep spot.
+
+        Args:
+            job_id: The ID of the job to cancel.
+
+        """
+        self._send_request(requests.post, f"jobs/{job_id}/cancel")
+
+    def delete_job(self, job_id: StrUUID) -> None:
+        """Delete a job with the given ID."""
+        self._send_request(requests.delete, f"jobs/{job_id}")
+
+    def get_submit_circuits_payload(self, job_id: StrUUID) -> RunRequest:
+        """Get the job payload, i.e. the contents of the run request sent to IQM Server."""
+        response = self._send_request(requests.get, f"jobs/{job_id}/payload")
+        return self._deserialize_response(response, RunRequest)
+
+    def get_job_artifact_sweep_results(self, job_id: StrUUID) -> SweepResults:
+        """Get N-dimensional sweep results from the database for the given sweep job."""
+        response = self._send_request(requests.get, f"jobs/{job_id}/artifacts/sweep_results")
+        return deserialize_sweep_results(response.content)
+
+    def get_job_artifact_measurements(self, job_id: StrUUID) -> CircuitMeasurementResultsBatch:
+        """Get the "measurements" artifact of the given circuit job."""
+        response = self._send_request(requests.get, f"jobs/{job_id}/artifacts/measurements")
+        return CircuitMeasurementResultsBatchAdapter.validate_json(response.text)
+
+    def get_job_artifact_measurement_counts(self, job_id: StrUUID) -> CircuitMeasurementCountsBatch:
+        """Get the "measurement_counts" artifact of the given circuit job."""
+        response = self._send_request(requests.get, f"jobs/{job_id}/artifacts/measurement_counts")
+        return CircuitCountsBatchAdapter.validate_json(response.text)
+
+    def _submit_job(
+        self,
+        job_type: str,
+        json_data: str | None = None,
+        protobuf_data: bytes | None = None,
+        use_timeslot: bool = False,
+    ) -> JobData:
+        """Submit a job for execution."""
+        params = _serialize_query_params({"use_timeslot": use_timeslot or self._use_timeslot})
+        response = self._send_request(
+            requests.post,
+            f"jobs/{self._quantum_computer}/{job_type}",
+            params=params,
+            json_data=json_data,
+            protobuf_data=protobuf_data,
+        )
+        return self._deserialize_response(response, JobData)
+
+    def _wait_job_completion(
+        self,
+        job_id: StrUUID,
+        *,
+        progress_callback: ProgressCallback | None,
+        timeout_secs: float = 0.0,
+    ) -> JobData:
+        """Wait for the completion of a job.
+
+        Polls the server, updating ``progress_callback``, until the job is in a terminal state.
+        Will stop the polling upon receiving a KeyboardInterrupt (Ctrl-C).
+        Does not cancel the job.
+
+        Args:
+            job_id: The ID of the job to wait for.
+            progress_callback: If not None, used to report the job progress to the caller while waiting.
+                Called with the relevant progress indicator info after each poll.
+            timeout_secs: If nonzero, give up after this time (in seconds), returning the last (nonterminal) status.
+
+        Returns:
+            Last seen job data.
+
+        """
+        logger.info("Waiting for job %s to finish...", job_id)
+        progress_callback = progress_callback or (lambda status: None)
+        # TODO How should the progress meter work? all the progress items should be in one list.
+        max_seen_queue_position = 0
+        start_time = datetime.now()
+        while True:
+            job_data = self.get_job(job_id)
+            if job_data.queue_position is not None:
+                # still in iqm-server queue
+                position = job_data.queue_position
+                max_seen_queue_position = max(max_seen_queue_position, position)
+                progress_callback([("Progress in queue", max_seen_queue_position - position, max_seen_queue_position)])
+            elif (execution := job_data.execution) is not None:
+                # convert the progress info into the old format, report it using the callback
+                statuses = [(label, v.value, v.max_value) for label, v in execution.progress.items()]
+                progress_callback(statuses)
+
+            if job_data.status in JobStatus.terminal_statuses():
+                return job_data
+
+            if timeout_secs and (datetime.now() - start_time).total_seconds() >= timeout_secs:
+                # stop waiting, return a nonterminal status
+                return job_data
+
+            sleep(_POLLING_INTERVAL)
+
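As the two branches above show, the progress callback is invoked with a list of ``(label, value, max_value)`` tuples, either a synthetic queue-position bar or the per-item execution progress. A minimal, hypothetical callback compatible with that shape (the package's own ``get_progress_bar_callback()``, imported above, is what ``IQMServerClientJob.wait_for_completion`` uses):

```python
def print_progress(items: list[tuple[str, float, float]]) -> None:
    """Hypothetical progress callback: print each progress item as 'label: value/max'."""
    for label, value, max_value in items:
        print(f"{label}: {value}/{max_value}")

# Sketch of use (client and job_id assumed to exist):
# job_data = client._wait_job_completion(job_id, progress_callback=print_progress, timeout_secs=300.0)
```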
+    @staticmethod
+    def _serialize_model(model: BaseModel) -> str:
+        """Serialize a Pydantic model into a JSON string.
+
+        All Pydantic models should be serialized using this method, to keep the client behavior uniform.
+
+        Args:
+            model: Pydantic model to JSON-serialize.
+
+        Returns:
+            Corresponding JSON string, may contain arbitrary Unicode characters.
+
+        """
+        # Strings in model can contain non-latin-1 characters. Unlike json.dumps which encodes non-latin-1 chars
+        # using the \uXXXX syntax, BaseModel.model_dump_json() keeps them in the produced JSON str.
+        return model.model_dump_json()
+
+    @classmethod
+    def _create_signature(cls, client_signature: str | None) -> str:
+        """Prepare the User-Agent header sent to the server."""
+        signature = f"{platform.platform(terse=True)}"
+        signature += f", python {platform.python_version()}"
+        dist_pkg_name = "iqm-client"
+        signature += f", {cls.__name__} {dist_pkg_name} {version(dist_pkg_name)}"
+        if client_signature:
+            signature += f", {client_signature}"
+        return signature
+
+    def _resolve_quantum_computer(self, user_defined_quantum_computer: str | None) -> str:
+        """Human-readable alias of the quantum computer this client connects to."""
+        response = self._send_request(requests.get, "quantum-computers")
+        quantum_computers = self._deserialize_response(response, ListQuantumComputersResponse).quantum_computers
+        aliases = ", ".join((qc.alias for qc in quantum_computers))
+        if user_defined_quantum_computer is None:
+            if len(quantum_computers) == 1:
+                return quantum_computers[0].alias
+            raise ClientConfigurationError(f"Quantum computer not selected. Available quantum computers are: {aliases}")
+
+        qc = next((qc for qc in quantum_computers if qc.alias == user_defined_quantum_computer), None)
+        if qc is None:
+            raise ClientConfigurationError(
+                f'Quantum computer "{user_defined_quantum_computer}" does not exist. '
+                + f"Available quantum computers are: {aliases}"
+            )
+        return qc.alias
+
+    def _send_request(
+        self,
+        http_method: Callable[..., requests.Response],
+        url_path: str,
+        *,
+        headers: dict[str, str] | None = None,
+        params: dict[str, Any] | None = None,
+        json_data: str | None = None,
+        protobuf_data: bytes | None = None,
+    ) -> requests.Response:
+        """Send an HTTP request.
+
+        Parameters ``json_data`` and ``protobuf_data`` are mutually exclusive.
+        The first non-None argument (in this order) will be used to construct the body of the request.
+
+        Args:
+            http_method: HTTP method to use for the request, any of requests.[post|get|put|head|delete|patch|options].
+            url_path: URL for the request.
+            headers: Additional HTTP headers for the request. Some may be overridden.
+            params: HTTP query parameters to store in the query string of the request URL.
+            json_data: JSON string to store in the body, may contain arbitrary Unicode characters.
+            protobuf_data: Pre-serialized protobuf binary data to store in the body.
+
+        Returns:
+            Response to the request.
+
+        Raises:
+            StationControlError: Request was not successful.
+
+        """
+        # Raises an error if an error response code is returned.
+        # http_method should be any of requests.[post|get|put|head|delete|patch|options]
+
+        request_kwargs = self._prepare_request_kwargs(
+            headers=headers or {},
+            params=params or {},
+            json_data=json_data,
+            protobuf_data=protobuf_data,
+            timeout=self._timeout,
+        )
+        url = f"{self.root_url}/api/{self.api_version}/{url_path}"
+        response = http_method(url, **request_kwargs)
+        if not response.ok:
+            try:
+                response_json = response.json()
+                error_message = response_json.get("message") or response_json["detail"]
+            except (json.JSONDecodeError, KeyError):
+                error_message = response.text
+
+            error_class = map_from_status_code_to_error(response.status_code)
+            raise error_class(error_message)
+        return response
+
+    def _prepare_request_kwargs(
+        self,
+        *,
+        headers: dict[str, str],
+        params: dict[str, Any],
+        json_data: str | None = None,
+        protobuf_data: bytes | None = None,
+        timeout: float,
+    ) -> dict[str, Any]:
+        """Prepare the keyword arguments for an HTTP request."""
+        # Add default headers
+        _headers = self._default_headers()
+
+        # "json_data" and "protobuf_data" are mutually exclusive
+        data: bytes | None = None
+        if json_data is not None:
+            # Must be able to handle JSON strings with arbitrary Unicode characters,
+            # so we use an explicit encoding into bytes,
+            # and set the headers so the recipient can decode the request body correctly.
+            data = json_data.encode("utf-8")
+            _headers["Content-Type"] = "application/json; charset=UTF-8"
+        elif protobuf_data is not None:
+            data = protobuf_data
+            _headers["Content-Type"] = "application/protobuf"
+
+        if "Accept" in headers:
+            _headers["Accept"] = headers["Accept"]
+
+        if self._enable_opentelemetry:
+            parent_span_context = trace.set_span_in_context(trace.get_current_span())
+            propagate.inject(carrier=headers, context=parent_span_context)
+
+        kwargs = {
+            "headers": _headers,
+            "params": params,
+            "data": data,
+            "timeout": timeout,
+        }
+        return _remove_empty_values(kwargs)
+
+    def _default_headers(self) -> dict[str, str]:
+        headers = {
+            "User-Agent": self._signature,
+            "Accept": "application/json",
+        }
+        # If auth header callback exists, use it to add the header
+        if self._auth_header_callback:
+            headers["Authorization"] = self._auth_header_callback()
+        return headers
+
+    @staticmethod
+    def _deserialize_response(
+        response: requests.Response,
+        model_class: type[TypePydanticBase | ListModel],
+    ) -> TypePydanticBase:
+        """Deserialize data using a Pydantic model."""
+        # Use "model_validate_json(response.text)" instead of "model_validate(response.json())".
+        # This validates the provided data as a JSON string or bytes object.
+        # If your incoming data is a JSON payload, this is generally considered faster.
+        model = model_class.model_validate_json(response.text)
+        if isinstance(model, ListModel):
+            return model.root
+        return model
+
+
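For orientation, a rough end-to-end sketch of how the class above fits together. The class is explicitly marked as a private, unstable API, so this is illustrative only; the server URL, alias, token, and the ``run_request`` construction are placeholders, not part of the package:

```python
# Illustrative only: _IQMServerClient is documented above as a private, unstable API.
client = _IQMServerClient(
    "https://example.iqm-server.invalid",  # placeholder URL
    quantum_computer="garnet",             # placeholder alias
    token="<IQM token>",                   # placeholder token
)

run_request: RunRequest = ...  # placeholder: build the RunRequest elsewhere

job = client.submit_circuits(run_request)  # returns JobData, including the job ID
job = client._wait_job_completion(job.id, progress_callback=None, timeout_secs=900.0)
if job.status in JobStatus.terminal_statuses():
    counts = client.get_job_artifact_measurement_counts(job.id)
```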
+def _remove_empty_values(kwargs: dict[str, Any]) -> dict[str, Any]:
+    """Return a copy of the given dict without values that are None or {}."""
+    return {key: value for key, value in kwargs.items() if value not in [None, {}]}
+
+
+def _serialize_query_params(params: dict[str, Any]) -> dict[str, Any]:
+    return {key: _serialize_query_param(value) for key, value in params.items() if value not in [None, {}]}
+
+
+def _serialize_query_param(value: Any) -> str:
+    if isinstance(value, bool):
+        return "true" if value else "false"
+    return str(value)
+
+
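The helpers above drop empty values and lower-case booleans so query strings match what the HTTP API expects. A small sketch of the resulting behavior (the "dry_run" key is an arbitrary example):

```python
params = _serialize_query_params({"use_timeslot": True, "dry_run": None})
assert params == {"use_timeslot": "true"}  # None values are dropped, booleans are lower-cased
assert _serialize_query_param(42) == "42"  # everything else falls back to str()
```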
+@dataclass
+class IQMServerClientJob(abc.ABC):
+    """ABC for classes representing an IQMServerClient job."""
+
+    data: JobData
+    """Light job-related data.
+
+    Experimental, should be considered private for now.
+    """
+
+    @property
+    def job_id(self) -> UUID:
+        """Unique ID of the job."""
+        return self.data.id
+
+    @property
+    def status(self) -> JobStatus:
+        """Last queried status of the job.
+
+        Note that this is not necessarily the same as the current status of the job,
+        unless the status is terminal.
+
+        To get the current status, use :meth:`update`.
+        """
+        return self.data.status
+
+    @property
+    @abc.abstractmethod
+    def _iqm_server_client(self) -> _IQMServerClient:
+        """A way to reach a client instance."""
+        raise NotImplementedError
+
+    @property
+    def _errors(self) -> str:
+        """All errors formatted as a string."""
+        return "\n".join(f" {str(error)}" for error in self.data.errors)
+
+    def update(self) -> JobStatus:
+        """Update the job data by querying the server.
+
+        Modifies ``self``.
+
+        Returns:
+            Current status of the job.
+
+        """
+        # TODO we somewhat unnecessarily call get_job() for COMPLETED, FAILED and CANCELLED jobs here,
+        # since those states are terminal.
+        job_data = self._iqm_server_client.get_job(self.job_id)
+        self.data = job_data
+        return self.status
+
+    def cancel(self) -> None:
+        """Cancel the job.
+
+        If execution is in progress, it will be interrupted after the current sweep spot.
+
+        """
+        self._iqm_server_client.cancel_job(self.job_id)
+
+    def wait_for_completion(
+        self,
+        *,
+        timeout_secs: float = DEFAULT_TIMEOUT_SECONDS,
+    ) -> JobStatus:
+        """Poll the server, updating the job status, until the job is either completed, failed, or cancelled,
+        or until we hit a timeout.
+
+        Will stop the polling (but does not cancel the job) upon receiving a KeyboardInterrupt (Ctrl-C).
+        If you want to cancel the job, call :meth:`cancel`.
+
+        Modifies ``self``.
+
+        Args:
+            timeout_secs: If nonzero, give up after this time (in seconds), returning the last (nonterminal) status.
+
+        Returns:
+            Last seen job status.
+
+        Raises:
+            KeyboardInterrupt: Received Ctrl-C while waiting for the job to finish.
+
+        """
+        try:
+            job_data = self._iqm_server_client._wait_job_completion(
+                self.job_id,
+                progress_callback=get_progress_bar_callback(),
+                timeout_secs=timeout_secs,
+            )
+        except KeyboardInterrupt:
+            # user pressed Ctrl-C
+            job_data = self._iqm_server_client.get_job(self.job_id)
+
+        self.data = job_data
+
+        if self.data.messages:
+            logger.debug("Job messages:\n%s", "\n".join(f" {msg.source}: {msg.message}" for msg in self.data.messages))
+
+        if self.status == JobStatus.FAILED:
+            logger.error(
+                "Job failed! Error(s):\n%s",
+                self._errors,
+            )
+        elif self.status == JobStatus.CANCELLED:
+            logger.error("Job was cancelled!")
+
+        return self.status
+
+    def find_timeline_entry(
+        self,
+        status: str,
+        source: Source | None = None,
+    ) -> TimelineEntry | None:
+        """Search the timeline for an entry matching the given criteria.
+
+        Args:
+            status: Status of the searched timeline entry.
+            source: Source of the searched timeline entry. If None, accepts any source.
+
+        Returns:
+            The first matching entry or ``None`` if the job timeline does not have any matching entries.
+
+        """
+        for entry in self.data.timeline:
+            if entry.status == status and (entry.source == source or source is None):
+                return entry
+        return None
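Since ``IQMServerClientJob`` is abstract and its only abstract member is the ``_iqm_server_client`` property, a concrete job wrapper only needs to supply a client instance. A minimal hypothetical subclass, for illustration only (``MyJob`` and the commented-out usage are not part of the package):

```python
from dataclasses import dataclass

@dataclass
class MyJob(IQMServerClientJob):
    """Hypothetical concrete job wrapper; not part of the package."""

    client: _IQMServerClient

    @property
    def _iqm_server_client(self) -> _IQMServerClient:
        # Satisfies the abstract property by returning the stored client instance.
        return self.client

# Sketch of use (client and run_request assumed to exist):
# job = MyJob(data=client.submit_circuits(run_request), client=client)
# job.wait_for_completion(timeout_secs=300.0)
# entry = job.find_timeline_entry("COMPLETED")  # the status string is an assumption
```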