QuLab 2.10.10__cp313-cp313-macosx_10_13_universal2.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qulab/__init__.py +33 -0
- qulab/__main__.py +4 -0
- qulab/cli/__init__.py +0 -0
- qulab/cli/commands.py +30 -0
- qulab/cli/config.py +170 -0
- qulab/cli/decorators.py +28 -0
- qulab/dicttree.py +523 -0
- qulab/executor/__init__.py +5 -0
- qulab/executor/analyze.py +188 -0
- qulab/executor/cli.py +434 -0
- qulab/executor/load.py +563 -0
- qulab/executor/registry.py +185 -0
- qulab/executor/schedule.py +543 -0
- qulab/executor/storage.py +615 -0
- qulab/executor/template.py +259 -0
- qulab/executor/utils.py +194 -0
- qulab/expression.py +827 -0
- qulab/fun.cpython-313-darwin.so +0 -0
- qulab/monitor/__init__.py +1 -0
- qulab/monitor/__main__.py +8 -0
- qulab/monitor/config.py +41 -0
- qulab/monitor/dataset.py +77 -0
- qulab/monitor/event_queue.py +54 -0
- qulab/monitor/mainwindow.py +234 -0
- qulab/monitor/monitor.py +115 -0
- qulab/monitor/ploter.py +123 -0
- qulab/monitor/qt_compat.py +16 -0
- qulab/monitor/toolbar.py +265 -0
- qulab/scan/__init__.py +2 -0
- qulab/scan/curd.py +221 -0
- qulab/scan/models.py +554 -0
- qulab/scan/optimize.py +76 -0
- qulab/scan/query.py +387 -0
- qulab/scan/record.py +603 -0
- qulab/scan/scan.py +1166 -0
- qulab/scan/server.py +450 -0
- qulab/scan/space.py +213 -0
- qulab/scan/utils.py +234 -0
- qulab/storage/__init__.py +0 -0
- qulab/storage/__main__.py +51 -0
- qulab/storage/backend/__init__.py +0 -0
- qulab/storage/backend/redis.py +204 -0
- qulab/storage/base_dataset.py +352 -0
- qulab/storage/chunk.py +60 -0
- qulab/storage/dataset.py +127 -0
- qulab/storage/file.py +273 -0
- qulab/storage/models/__init__.py +22 -0
- qulab/storage/models/base.py +4 -0
- qulab/storage/models/config.py +28 -0
- qulab/storage/models/file.py +89 -0
- qulab/storage/models/ipy.py +58 -0
- qulab/storage/models/models.py +88 -0
- qulab/storage/models/record.py +161 -0
- qulab/storage/models/report.py +22 -0
- qulab/storage/models/tag.py +93 -0
- qulab/storage/storage.py +95 -0
- qulab/sys/__init__.py +2 -0
- qulab/sys/chat.py +688 -0
- qulab/sys/device/__init__.py +3 -0
- qulab/sys/device/basedevice.py +255 -0
- qulab/sys/device/loader.py +86 -0
- qulab/sys/device/utils.py +79 -0
- qulab/sys/drivers/FakeInstrument.py +68 -0
- qulab/sys/drivers/__init__.py +0 -0
- qulab/sys/ipy_events.py +125 -0
- qulab/sys/net/__init__.py +0 -0
- qulab/sys/net/bencoder.py +205 -0
- qulab/sys/net/cli.py +169 -0
- qulab/sys/net/dhcp.py +543 -0
- qulab/sys/net/dhcpd.py +176 -0
- qulab/sys/net/kad.py +1142 -0
- qulab/sys/net/kcp.py +192 -0
- qulab/sys/net/nginx.py +194 -0
- qulab/sys/progress.py +190 -0
- qulab/sys/rpc/__init__.py +0 -0
- qulab/sys/rpc/client.py +0 -0
- qulab/sys/rpc/exceptions.py +96 -0
- qulab/sys/rpc/msgpack.py +1052 -0
- qulab/sys/rpc/msgpack.pyi +41 -0
- qulab/sys/rpc/router.py +35 -0
- qulab/sys/rpc/rpc.py +412 -0
- qulab/sys/rpc/serialize.py +139 -0
- qulab/sys/rpc/server.py +29 -0
- qulab/sys/rpc/socket.py +29 -0
- qulab/sys/rpc/utils.py +25 -0
- qulab/sys/rpc/worker.py +0 -0
- qulab/sys/rpc/zmq_socket.py +227 -0
- qulab/tools/__init__.py +0 -0
- qulab/tools/connection_helper.py +39 -0
- qulab/typing.py +2 -0
- qulab/utils.py +95 -0
- qulab/version.py +1 -0
- qulab/visualization/__init__.py +188 -0
- qulab/visualization/__main__.py +71 -0
- qulab/visualization/_autoplot.py +464 -0
- qulab/visualization/plot_circ.py +319 -0
- qulab/visualization/plot_layout.py +408 -0
- qulab/visualization/plot_seq.py +242 -0
- qulab/visualization/qdat.py +152 -0
- qulab/visualization/rot3d.py +23 -0
- qulab/visualization/widgets.py +86 -0
- qulab-2.10.10.dist-info/METADATA +110 -0
- qulab-2.10.10.dist-info/RECORD +107 -0
- qulab-2.10.10.dist-info/WHEEL +5 -0
- qulab-2.10.10.dist-info/entry_points.txt +2 -0
- qulab-2.10.10.dist-info/licenses/LICENSE +21 -0
- qulab-2.10.10.dist-info/top_level.txt +1 -0
@@ -0,0 +1,615 @@
|
|
1
|
+
import hashlib
|
2
|
+
import lzma
|
3
|
+
import pickle
|
4
|
+
import re
|
5
|
+
import uuid
|
6
|
+
import zipfile
|
7
|
+
from dataclasses import dataclass, field
|
8
|
+
from datetime import datetime, timedelta
|
9
|
+
from functools import lru_cache
|
10
|
+
from pathlib import Path
|
11
|
+
from typing import Any, Literal
|
12
|
+
from urllib.parse import parse_qs
|
13
|
+
|
14
|
+
from loguru import logger
|
15
|
+
|
16
|
+
# Optional SSH support: if paramiko is unavailable, install stub classes so
# the rest of the module still imports; any attempt to actually use SSH then
# raises ImportError.
try:
    from paramiko import SSHClient
    from paramiko.ssh_exception import SSHException
except ImportError:
    # Narrowed from a bare ``except``: only a missing/broken paramiko should
    # trigger the fallback, not e.g. KeyboardInterrupt or SystemExit.
    import warnings

    warnings.warn("Can't import paramiko, ssh support will be disabled.")

    class SSHClient:
        """Placeholder that fails loudly when SSH support is requested."""

        def __init__(self):
            raise ImportError(
                "Can't import paramiko, ssh support will be disabled.")

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            pass

    class SSHException(Exception):
        pass
40
|
+
from ..cli.config import get_config_value
|
41
|
+
|
42
|
+
# Lazily-populated module-level cache of the config-key parameter store
# ("<data>/state/parameters.pkl"): maps key -> (data, calibrated_time,
# checked_time).  Shared by save_config_key_history / find_config_key_history.
__current_config_cache = None
|
43
|
+
|
44
|
+
|
45
|
+
@dataclass
class Report():
    """A single calibration/check result for a workflow, plus the
    bookkeeping needed to locate it (and its predecessor) on disk.
    """
    workflow: str = ''  # name of the workflow this report belongs to
    in_spec: bool = False  # last check found parameters within spec
    bad_data: bool = False  # last check produced unusable data
    fully_calibrated: bool = False  # a full calibration (not just a check) was done
    calibrated_time: datetime = field(default_factory=datetime.now)
    checked_time: datetime = field(default_factory=datetime.now)
    ttl: timedelta = timedelta(days=3650)  # validity window of the result
    parameters: dict = field(default_factory=dict)
    oracle: dict = field(default_factory=dict)
    # NOTE(review): "infomation" is a historical typo; kept as-is because the
    # attribute name is part of the pickled on-disk format.
    other_infomation: dict = field(default_factory=dict)
    data: Any = field(default_factory=tuple, repr=False)
    index: int = -1  # sequence number in the global "report" index
    previous_path: Path | None = field(default=None, repr=False)
    heads: dict[str, Path] = field(default_factory=dict, repr=False)
    # base_path is transient (dropped by __getstate__); set after loading.
    base_path: str | Path | None = field(default=None, repr=False)
    path: Path | None = field(default=None, repr=False)
    config_path: Path | None = field(default=None, repr=False)
    script_path: Path | None = field(default=None, repr=False)

    def __getstate__(self):
        # Pickle-friendly state: drop the transient base_path and store all
        # path attributes as plain strings.
        state = self.__dict__.copy()
        state.pop('base_path', None)
        for k in ['path', 'previous_path', 'config_path', 'script_path']:
            if state[k] is not None:
                state[k] = str(state[k])
        return state

    def __setstate__(self, state):
        # Inverse of __getstate__: restore Path objects from strings.
        for k in ['path', 'previous_path', 'config_path', 'script_path']:
            if state[k] is not None:
                state[k] = Path(state[k])
        self.__dict__.update(state)

    @property
    def previous(self):
        """The report this one superseded, loaded lazily; None if unknown."""
        if self.previous_path is not None and self.base_path is not None:
            return load_report(self.previous_path, self.base_path)
        else:
            return None

    @property
    def state(self) -> Literal['OK', 'Bad', 'Outdated']:
        """Summarize (in_spec, bad_data) as a single status string.

        (True, False) -> 'OK'; (False, True) -> 'Bad';
        (False, False) -> 'Outdated'; the unmatched (True, True)
        combination falls through to the initial 'Bad'.
        """
        state = 'Bad'
        match (self.in_spec, self.bad_data):
            case (True, False):
                state = 'OK'
            case (False, True):
                state = 'Bad'
            case (False, False):
                state = 'Outdated'
        return state

    @state.setter
    def state(self, state: Literal['OK', 'Bad', 'Outdated', 'In spec',
                                   'Out of spec', 'Bad data']):
        # Accepts both the short and the long spellings of each status.
        if state not in [
                'OK', 'Bad', 'Outdated', 'In spec', 'Out of spec', 'Bad data'
        ]:
            raise ValueError(
                f'Invalid state: {state}, state must be one of "OK", "Bad" and "Outdated"'
            )
        if state in ['In spec', 'OK']:
            self.in_spec = True
            self.bad_data = False
        elif state in ['Bad data', 'Bad']:
            self.bad_data = True
            self.in_spec = False
        else:
            # 'Outdated' / 'Out of spec': neither good nor bad data.
            self.bad_data = False
            self.in_spec = False

    @property
    def config(self):
        """The config snapshot stored with this report, or None."""
        if self.config_path is not None and self.base_path is not None:
            return load_item(self.config_path, 'items', self.base_path)
        else:
            return None

    @property
    def script(self):
        """The script source; template tuples are rendered on access."""
        if self.script_path is not None and self.base_path is not None:
            source = load_item(self.script_path, 'items', self.base_path)
            if isinstance(source, str):
                return source
            else:
                from .template import inject_mapping
                return inject_mapping(*source)[0]
        else:
            return None

    @property
    def template_source(self):
        """The stored script item as-is (template not injected), or None."""
        if self.script_path is not None and self.base_path is not None:
            source = load_item(self.script_path, 'items', self.base_path)
            return source
        else:
            return None
|
146
|
+
def random_path(base: Path) -> Path:
    """Return a relative path under *base* that does not exist yet.

    The path is derived from a random UUID4 hex string, fanned out into
    three two-character directory levels to keep directories small.
    """
    while True:
        token = uuid.uuid4().hex
        candidate = Path(token[:2]) / token[2:4] / token[4:6] / token[6:]
        if (base / candidate).exists():
            continue
        return candidate
|
153
|
+
|
154
|
+
def find_report(
        workflow: str,
        base_path: str | Path | None = None) -> Report | None:
    """Return the newest report for *workflow*, or None if there is none.

    ``cfg:`` workflows are looked up in the config-key history instead of
    the report store.

    The previous signature evaluated ``get_config_value("data", Path)`` as
    a default argument, i.e. once at import time; it is now resolved at
    call time so configuration changes made after import are honoured.
    """
    if base_path is None:
        base_path = get_config_value("data", Path)
    if workflow.startswith("cfg:"):
        return find_config_key_history(workflow[4:], base_path)

    base_path = Path(base_path)
    path = get_head(workflow, base_path)
    if path is None:
        return None
    return load_report(path, base_path)
+
|
167
|
+
def renew_report(workflow: str, report: Report | None, base_path: str | Path):
    """Refresh the checked time of *report* and persist it in place."""
    logger.debug(f'Renewing report for "{workflow}"')
    if report is None:
        raise ValueError(f"Can't renew report for {workflow}")
    report.checked_time = datetime.now()
    # Overwrite keeps the report's existing path and index.
    return save_report(workflow,
                       report,
                       base_path,
                       overwrite=True,
                       refresh_heads=True)
|
179
|
+
|
180
|
+
def revoke_report(workflow: str, report: Report | None, base_path: str | Path):
    """Mark *report* as out of spec and save it as a new head revision."""
    logger.debug(f'Revoking report for "{workflow}"')
    base_path = Path(base_path)
    if report is None:
        return None
    report.in_spec = False
    # Link the new revision back to the revoked one.
    report.previous_path = report.path
    return save_report(workflow,
                       report,
                       base_path,
                       overwrite=False,
                       refresh_heads=True)
|
192
|
+
|
193
|
+
def get_report_by_index(
        index: int,
        base_path: str | Path | None = None) -> Report | None:
    """Load the report recorded at *index* in the global report index.

    Lookup or I/O errors propagate to the caller.  The previous body's
    ``try/except: raise`` was a no-op that re-raised everything and left
    a ``return None`` unreachable; both have been removed.  The data
    directory default is now resolved at call time instead of import time.
    """
    if base_path is None:
        base_path = get_config_value("data", Path)
    path = query_index("report", base_path, index)
    return load_report(path, base_path)
|
+
|
204
|
+
def get_head(workflow: str, base_path: str | Path) -> Path | None:
    """Return the path of the newest report for *workflow*, if any."""
    heads = get_heads(base_path)
    return heads.get(workflow, None)
|
207
|
+
|
208
|
+
#########################################################################
|
209
|
+
## Basic Write API ##
|
210
|
+
#########################################################################
|
211
|
+
|
212
|
+
|
213
|
+
def set_head(workflow: str, path: Path, base_path: str | Path):
|
214
|
+
base_path = Path(base_path)
|
215
|
+
base_path.mkdir(parents=True, exist_ok=True)
|
216
|
+
try:
|
217
|
+
with open(base_path / "heads", "rb") as f:
|
218
|
+
heads = pickle.load(f)
|
219
|
+
except:
|
220
|
+
heads = {}
|
221
|
+
heads[workflow] = path
|
222
|
+
with open(base_path / "heads", "wb") as f:
|
223
|
+
pickle.dump(heads, f)
|
224
|
+
|
225
|
+
|
226
|
+
def save_report(workflow: str,
                report: Report,
                base_path: str | Path,
                overwrite: bool = False,
                refresh_heads: bool = True) -> int:
    """Serialize *report* and write it under ``reports/``; return its index.

    ``cfg:`` workflows are routed to the config-key history store.  When
    *overwrite* is true the report is written back to its existing
    ``report.path`` and keeps the index stored in the file's first 8 bytes;
    otherwise a fresh random path and a fresh index entry are allocated.
    When *refresh_heads* is true the workflow head is updated to this path.
    """
    if workflow.startswith("cfg:"):
        return save_config_key_history(workflow[4:], report, base_path)

    logger.debug(
        f'Saving report for "{workflow}", {report.in_spec=}, {report.bad_data=}, {report.fully_calibrated=}'
    )
    base_path = Path(base_path)
    try:
        buf = lzma.compress(pickle.dumps(report))
    except Exception as e:
        # Chain the original pickling error instead of discarding it with
        # a bare ``except``.
        raise ValueError(f"Can't pickle report for {workflow}") from e
    if overwrite:
        path = report.path
        if path is None:
            raise ValueError("Report path is None, can't overwrite.")
        # Keep the index assigned when the report was first saved.
        with open(base_path / 'reports' / path, "rb") as f:
            index = int.from_bytes(f.read(8), 'big')
            report.index = index
    else:
        path = random_path(base_path / 'reports')
        (base_path / 'reports' / path).parent.mkdir(parents=True,
                                                    exist_ok=True)
        report.path = path
        report.index = create_index("report",
                                    base_path,
                                    context=str(path),
                                    width=35)
    with open(base_path / 'reports' / path, "wb") as f:
        f.write(report.index.to_bytes(8, 'big'))
        f.write(buf)
    if refresh_heads:
        set_head(workflow, path, base_path)
    return report.index
|
265
|
+
|
266
|
+
def create_index(name: str,
|
267
|
+
base_path: str | Path,
|
268
|
+
context: str,
|
269
|
+
width: int = -1,
|
270
|
+
start: int = 0):
|
271
|
+
|
272
|
+
path = Path(base_path) / "index" / name
|
273
|
+
if width == -1:
|
274
|
+
width = len(context)
|
275
|
+
else:
|
276
|
+
width = max(width, len(context))
|
277
|
+
|
278
|
+
if path.with_suffix('.width').exists():
|
279
|
+
width = int(path.with_suffix('.width').read_text())
|
280
|
+
index = int(path.with_suffix('.seq').read_text())
|
281
|
+
else:
|
282
|
+
index = start
|
283
|
+
if width < len(context):
|
284
|
+
raise ValueError(
|
285
|
+
f"Context '{context}' is too long, existing width of '{name}' is {width}."
|
286
|
+
)
|
287
|
+
if not path.with_suffix('.width').exists():
|
288
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
289
|
+
path.with_suffix('.width').write_text(str(width))
|
290
|
+
|
291
|
+
path.with_suffix('.seq').write_text(str(index + 1))
|
292
|
+
|
293
|
+
with path.with_suffix('.idx').open("a") as f:
|
294
|
+
f.write(f"{context.ljust(width)}\n")
|
295
|
+
|
296
|
+
return index
|
297
|
+
|
298
|
+
|
299
|
+
def save_item(item, group, data_path):
    """Store *item* content-addressed under ``data_path/group``.

    The item is pickled and lzma-compressed, then written to a path derived
    from an MD5 digest of the payload.  If the slot is taken by different
    content the digest is re-salted until a free (or byte-identical) slot
    is found.  Returns the relative item id as a string.
    """
    buf = lzma.compress(pickle.dumps(item))
    digest = hashlib.md5(buf)
    salt = 0
    while True:
        digest.update(f"{salt}".encode())
        hashstr = digest.hexdigest()
        item_id = Path(hashstr[:2]) / hashstr[2:4] / hashstr[4:]
        path = Path(data_path) / group / item_id
        if not path.exists():
            path.parent.mkdir(parents=True, exist_ok=True)
            with open(path, 'wb') as f:
                f.write(buf)
            break
        if path.read_bytes() == buf:
            # Same content already stored: reuse the existing slot.
            break
        salt += 1
    return str(item_id)
|
320
|
+
|
321
|
+
def append_item_data(data, id, group, base_path):
    """Append raw *data* bytes to an already-stored item.

    Raises ValueError if the item does not exist.
    """
    path = Path(base_path) / group / id
    if not path.exists():
        raise ValueError(f"Item {id} does not exist.")
    with path.open('ab') as f:
        f.write(data)
|
328
|
+
|
329
|
+
def save_config_key_history(key: str, report: Report,
                            base_path: str | Path) -> int:
    """Record *key*'s (data, calibrated_time, checked_time) triple in the
    shared ``state/parameters.pkl`` store.  Always returns index 0.
    """
    global __current_config_cache
    state_dir = Path(base_path) / 'state'
    state_dir.mkdir(parents=True, exist_ok=True)

    # Populate the module-level cache from disk on first use.
    if __current_config_cache is None:
        params_file = state_dir / 'parameters.pkl'
        if params_file.exists():
            with open(params_file, 'rb') as f:
                __current_config_cache = pickle.load(f)
        else:
            __current_config_cache = {}

    __current_config_cache[key] = (report.data, report.calibrated_time,
                                   report.checked_time)

    with open(state_dir / 'parameters.pkl', 'wb') as f:
        pickle.dump(__current_config_cache, f)
    return 0
|
349
|
+
|
350
|
+
#########################################################################
|
351
|
+
## Basic Read API ##
|
352
|
+
#########################################################################
|
353
|
+
|
354
|
+
|
355
|
+
def load_report(path: str | Path, base_path: str | Path) -> Report | None:
    """Load a report from a local directory, a zip archive or an ssh:// URI.

    *path* is the report's path relative to the store's ``reports/``
    directory; *base_path* selects the backend.
    """
    if isinstance(base_path, str) and base_path.startswith('ssh://'):
        with SSHClient() as client:
            cfg = parse_ssh_uri(base_path)
            remote_base_path = cfg.pop('remote_file_path')
            client.load_system_host_keys()
            client.connect(**cfg)
            report = load_report_from_scp(path, remote_base_path, client)
            report.base_path = base_path
            return report

    base_path = Path(base_path)
    if zipfile.is_zipfile(base_path):
        return load_report_from_zipfile(path, base_path)

    # Join exactly once.  The previous code assigned
    # ``path = base_path / 'reports' / path`` and then joined again inside
    # ``open``; that only worked because an absolute right-hand operand
    # makes pathlib discard the left side, and it silently broke for
    # relative base paths.
    full_path = base_path / 'reports' / path

    with open(full_path, "rb") as f:
        # File layout: 8-byte big-endian index, then lzma-compressed pickle.
        index = int.from_bytes(f.read(8), 'big')
        report = pickle.loads(lzma.decompress(f.read()))
        report.base_path = base_path
        report.index = index
        return report
|
379
|
+
|
380
|
+
def get_heads(base_path: str | Path) -> Path | None:
|
381
|
+
if isinstance(base_path, str) and base_path.startswith('ssh://'):
|
382
|
+
with SSHClient() as client:
|
383
|
+
cfg = parse_ssh_uri(base_path)
|
384
|
+
remote_base_path = cfg.pop('remote_file_path')
|
385
|
+
client.load_system_host_keys()
|
386
|
+
client.connect(**cfg)
|
387
|
+
return get_heads_from_scp(remote_base_path, client)
|
388
|
+
|
389
|
+
base_path = Path(base_path)
|
390
|
+
if zipfile.is_zipfile(base_path):
|
391
|
+
return get_heads_from_zipfile(base_path)
|
392
|
+
try:
|
393
|
+
with open(base_path / "heads", "rb") as f:
|
394
|
+
heads = pickle.load(f)
|
395
|
+
return heads
|
396
|
+
except:
|
397
|
+
return {}
|
398
|
+
|
399
|
+
|
400
|
+
@lru_cache(maxsize=4096)
|
401
|
+
def query_index(name: str, base_path: str | Path, index: int):
|
402
|
+
if isinstance(base_path, str) and base_path.startswith('ssh://'):
|
403
|
+
with SSHClient() as client:
|
404
|
+
cfg = parse_ssh_uri(base_path)
|
405
|
+
remote_base_path = cfg.pop('remote_file_path')
|
406
|
+
client.load_system_host_keys()
|
407
|
+
client.connect(**cfg)
|
408
|
+
return query_index_from_scp(name, remote_base_path, client, index)
|
409
|
+
|
410
|
+
base_path = Path(base_path)
|
411
|
+
if zipfile.is_zipfile(base_path):
|
412
|
+
return query_index_from_zipfile(name, base_path, index)
|
413
|
+
path = Path(base_path) / "index" / name
|
414
|
+
width = int(path.with_suffix('.width').read_text())
|
415
|
+
|
416
|
+
with path.with_suffix('.idx').open("r") as f:
|
417
|
+
f.seek(index * (width + 1))
|
418
|
+
context = f.read(width)
|
419
|
+
return context.rstrip()
|
420
|
+
|
421
|
+
|
422
|
+
@lru_cache(maxsize=4096)
def load_item(id, group, base_path):
    """Load and unpickle a stored item from a local directory, a zip
    archive or an ssh:// URI.  Results are memoized.
    """
    if isinstance(base_path, str) and base_path.startswith('ssh://'):
        with SSHClient() as client:
            cfg = parse_ssh_uri(base_path)
            remote_base_path = cfg.pop('remote_file_path')
            client.load_system_host_keys()
            client.connect(**cfg)
            buf = load_item_buf_from_scp(id, group, remote_base_path, client)
    else:
        base_path = Path(base_path)
        if zipfile.is_zipfile(base_path):
            buf = load_item_buf_from_zipfile(id, group, base_path)
        else:
            buf = (Path(base_path) / group / id).read_bytes()
    return pickle.loads(lzma.decompress(buf))
|
442
|
+
|
443
|
+
def find_config_key_history(key: str, base_path: str | Path) -> Report | None:
    """Build a synthetic in-spec report for *key* from the parameters
    cache, or None when the key has never been recorded.
    """
    global __current_config_cache
    state_dir = Path(base_path) / 'state'

    # Populate the module-level cache from disk on first use.
    if __current_config_cache is None:
        params_file = state_dir / 'parameters.pkl'
        if params_file.exists():
            with open(params_file, 'rb') as f:
                __current_config_cache = pickle.load(f)
        else:
            __current_config_cache = {}

    if key not in __current_config_cache:
        return None

    value, calibrated_time, checked_time = __current_config_cache[key]
    return Report(
        workflow=f'cfg:{key}',
        bad_data=False,
        in_spec=True,
        fully_calibrated=True,
        parameters={key: value},
        data=value,
        calibrated_time=calibrated_time,
        checked_time=checked_time,
    )
|
469
|
+
|
470
|
+
#########################################################################
|
471
|
+
## Zipfile support ##
|
472
|
+
#########################################################################
|
473
|
+
|
474
|
+
|
475
|
+
def load_report_from_zipfile(path: str | Path,
                             base_path: str | Path) -> Report | None:
    """Read a report from inside a zip archive.

    The archive is expected to contain a top-level directory named after
    the archive's stem, so *base_path* must be a Path despite the str hint
    (``.stem`` is used) — same requirement as the original.
    """
    inner = '/'.join(Path(path).parts)
    with zipfile.ZipFile(base_path) as zf:
        with zf.open(f"{base_path.stem}/reports/{inner}") as f:
            # File layout: 8-byte big-endian index, then lzma pickle.
            index = int.from_bytes(f.read(8), 'big')
            report = pickle.loads(lzma.decompress(f.read()))
            report.base_path = base_path
            report.index = index
            return report
|
487
|
+
|
488
|
+
def get_heads_from_zipfile(base_path: str | Path) -> Path | None:
|
489
|
+
with zipfile.ZipFile(base_path) as zf:
|
490
|
+
with zf.open(f"{base_path.stem}/heads") as f:
|
491
|
+
heads = pickle.load(f)
|
492
|
+
return heads
|
493
|
+
|
494
|
+
|
495
|
+
def query_index_from_zipfile(name: str, base_path: str | Path, index: int):
|
496
|
+
with zipfile.ZipFile(base_path) as zf:
|
497
|
+
with zf.open(f"{base_path.stem}/index/{name}.width") as f:
|
498
|
+
width = int(f.read().decode())
|
499
|
+
with zf.open(f"{base_path.stem}/index/{name}.idx") as f:
|
500
|
+
f.seek(index * (width + 1))
|
501
|
+
context = f.read(width).decode()
|
502
|
+
return context.rstrip()
|
503
|
+
|
504
|
+
|
505
|
+
def load_item_buf_from_zipfile(id, group, base_path):
    """Return the raw (compressed) bytes of an item stored in a zip file."""
    member = f"{base_path.stem}/{group}/{id}"
    with zipfile.ZipFile(base_path) as zf:
        with zf.open(member) as f:
            return f.read()
|
510
|
+
|
511
|
+
#########################################################################
|
512
|
+
## SCP support ##
|
513
|
+
#########################################################################
|
514
|
+
|
515
|
+
|
516
|
+
def parse_ssh_uri(uri):
    """Parse an SSH URI into paramiko connection kwargs plus a remote path.

    Format:
    ``ssh://[{username}[:{password}]@]{host}[:{port}][?key_filename={key_path}][/{remote_file_path}]``

    Example result::

        {
            "username": "user",
            "password": "pass",
            "hostname": "example.com",
            "port": 22,
            "key_filename": "/path/to/key",
            "remote_file_path": "/data/file.txt"
        }

    The previous pattern made the username group effectively mandatory
    (``(?:([^:@/]+))(?::([^@/]+))?@?``), so ``ssh://example.com/x`` was
    misparsed via backtracking (host became a fragment of the domain).
    The credentials block is now one optional group ending in ``@``.
    The remote path is returned exactly as matched, leading slash included.
    """
    pattern = re.compile(
        r"^ssh://"                          # scheme
        r"(?:([^:@/]+)(?::([^@/]+))?@)?"    # optional user[:password]@
        r"([^:/?#]+)"                       # host (required)
        r"(?::(\d+))?"                      # optional port
        r"(/?[^?#]*)?"                      # optional remote path
        r"(?:\?([^#]+))?"                   # optional query (key_filename=...)
        r"$",
        re.IGNORECASE)

    match = pattern.match(uri)
    if not match:
        raise ValueError(f"Invalid SSH URI format: {uri}")

    username, password, host, port, path, query = match.groups()

    # key_filename may be supplied as a query parameter; take the first value.
    key_filename = None
    if query:
        params = parse_qs(query)
        key_filename = params.get("key_filename", [None])[0]

    remote_file_path = path

    return {
        "username": username,
        "password": password,
        "hostname": host,
        "port": int(port) if port else 22,  # default SSH port
        "key_filename": key_filename,
        "remote_file_path": remote_file_path
    }
|
567
|
+
|
568
|
+
def load_report_from_scp(path: str | Path, base_path: Path,
                         client: SSHClient) -> Report:
    """Fetch and deserialize a report over SFTP.

    Raises ValueError when the SSH transfer fails.
    """
    try:
        path = Path(path)
        with client.open_sftp() as sftp:
            target = str(Path(base_path) / 'reports' / path)
            with sftp.open(target, 'rb') as f:
                # File layout: 8-byte big-endian index, then lzma pickle.
                index = int.from_bytes(f.read(8), 'big')
                report = pickle.loads(lzma.decompress(f.read()))
                report.index = index
                return report
    except SSHException:
        raise ValueError(f"Can't load report from {path}")
|
581
|
+
|
582
|
+
def get_heads_from_scp(base_path: Path, client: SSHClient) -> Path | None:
    """Fetch the pickled head table over SFTP; None on SSH failure."""
    target = str(Path(base_path) / 'heads')
    try:
        with client.open_sftp() as sftp:
            with sftp.open(target, 'rb') as f:
                return pickle.load(f)
    except SSHException:
        return None
|
591
|
+
|
592
|
+
def query_index_from_scp(name: str, base_path: Path, client: SSHClient,
                         index: int):
    """Fetch fixed-width record *index* of index *name* over SFTP.

    Returns None when the SSH transfer fails.
    """
    index_dir = Path(base_path) / 'index'
    try:
        with client.open_sftp() as sftp:
            with sftp.open(str(index_dir / f'{name}.width'), 'rb') as f:
                width = int(f.read().decode())
            # Records are fixed width plus a newline; seek to the entry.
            with sftp.open(str(index_dir / f'{name}.idx'), 'rb') as f:
                f.seek(index * (width + 1))
                return f.read(width).decode().rstrip()
    except SSHException:
        return None
|
607
|
+
|
608
|
+
def load_item_buf_from_scp(id: str, group: str, base_path: Path,
                           client: SSHClient):
    """Fetch the raw bytes of a stored item over SFTP; None on SSH failure."""
    target = str(Path(base_path) / group / str(id))
    try:
        with client.open_sftp() as sftp:
            with sftp.open(target, 'rb') as f:
                return f.read()
    except SSHException:
        return None