QuLab 2.7.18__cp311-cp311-win_amd64.whl → 2.8.0__cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
qulab/executor/cli.py CHANGED
@@ -2,6 +2,7 @@ import functools
 import graphlib
 import importlib
 import os
+import sys
 from pathlib import Path
 
 import click
@@ -68,6 +69,10 @@ def command_option(command_name):
                  help='The path of the bootstrap.')
     @functools.wraps(func)
     def wrapper(*args, **kwargs):
+        if 'code' in kwargs and kwargs['code'] is not None:
+            code = os.path.expanduser(kwargs['code'])
+            if code not in sys.path:
+                sys.path.insert(0, code)
         bootstrap = kwargs.pop('bootstrap')
        if bootstrap is not None:
            boot(bootstrap)
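
Note: the new block makes the directory given by the `--code` option importable before the wrapped command runs. A minimal standalone sketch of the same behaviour (the helper name `ensure_code_on_path` is illustrative, not part of QuLab):

    import os
    import sys

    def ensure_code_on_path(code: str | None) -> None:
        # Expand '~' and prepend the code directory to sys.path exactly once,
        # so workflow modules stored there can be imported by name.
        if code is not None:
            code = os.path.expanduser(code)
            if code not in sys.path:
                sys.path.insert(0, code)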
qulab/executor/load.py CHANGED
@@ -81,16 +81,6 @@ class SetConfigWorkflow():
 WorkflowType = ModuleType | SetConfigWorkflow
 
 
-def get_source(workflow: WorkflowType, code_path: str | Path) -> str:
-    if isinstance(code_path, str):
-        code_path = Path(code_path)
-    try:
-        with open(code_path / workflow.__workflow_id__, 'r') as f:
-            return f.read()
-    except:
-        return ''
-
-
 def can_call_without_args(func):
     if not callable(func):
         return False
@@ -320,6 +310,8 @@ def load_workflow_from_file(file_name: str,
     module = module_from_spec(spec)
     spec.loader.exec_module(module)
     module.__mtime__ = (base_path / path).stat().st_mtime
+    source_code = (base_path / path).read_text()
+    module.__source__ = source_code
 
     if hasattr(module, 'entries'):
         verify_entries(module, base_path)
@@ -347,11 +339,11 @@ def load_workflow_from_template(template_path: str,
     path = Path(template_path)
 
     with open(base_path / path) as f:
-        content = f.read()
+        template = f.read()
 
     mtime = max((base_path / template_path).stat().st_mtime, mtime)
 
-    content, hash_str = inject_mapping(content, mapping, str(path))
+    content, hash_str = inject_mapping(template, mapping, str(path))
 
     if target_path is None:
         if path.stem == 'template':
@@ -365,22 +357,19 @@ def load_workflow_from_template(template_path: str,
         path = target_path
 
     file = base_path / path
-    if not file.exists():
+    if not file.exists() or file.stat().st_mtime < mtime:
         file.parent.mkdir(parents=True, exist_ok=True)
         with open(file, 'w') as f:
             f.write(content)
-    else:
-        if file.stat().st_mtime < mtime:
-            with open(file, 'w') as f:
-                f.write(content)
-        else:
-            if file.read_text() != content:
-                logger.warning(
-                    f"`{file}` already exists and is different from the new one generated from template `{template_path}`"
-                )
+    elif file.read_text() != content:
+        logger.warning(
+            f"`{file}` already exists and is different from the new one generated from template `{template_path}`"
+        )
 
     module = load_workflow_from_file(str(path), base_path, package)
     module.__mtime__ = max(mtime, module.__mtime__)
+    if module.__source__ == content:
+        module.__source__ = template, mapping, str(template_path)
 
     return module
 
@@ -422,8 +411,6 @@ def load_workflow(workflow: str | tuple[str, dict],
         w.__workflow_id__ = str(Path(w.__file__).relative_to(base_path))
     else:
         raise TypeError(f"Invalid workflow: {workflow}")
-
-    w.__source__ = get_source(w, base_path)
 
     return w
 
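Note: with these changes a workflow module always carries a `__source__` attribute. Modules loaded from a plain file store their raw text, while modules rendered from a template store the `(template, mapping, template_path)` tuple whenever the rendered file still matches the injected content. A sketch of how either form can be turned back into script text, assuming `inject_mapping` returns `(content, hash_str)` as it is called above (`script_text` is an illustrative helper, not QuLab API):

    from qulab.executor.template import inject_mapping

    def script_text(source) -> str:
        # __source__ from a plain workflow file: already the source code.
        if isinstance(source, str):
            return source
        # __source__ from a template: re-render it from the stored pieces.
        template, mapping, template_path = source
        content, _hash = inject_mapping(template, mapping, template_path)
        return content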
qulab/executor/storage.py CHANGED
@@ -2,6 +2,7 @@ import hashlib
 import lzma
 import pickle
 import uuid
+import zipfile
 from dataclasses import dataclass, field
 from datetime import datetime, timedelta
 from functools import lru_cache
@@ -10,6 +11,22 @@ from typing import Any, Literal
 
 from loguru import logger
 
+try:
+    from paramiko import SSHClient
+    from paramiko.ssh_exception import SSHException
+except:
+    class SSHClient:
+
+        def __init__(self):
+            raise ImportError("Can't import paramiko, ssh support will be disabled.")
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, exc_type, exc_value, traceback):
+            pass
+
+
 from ..cli.config import get_config_value
 
 __current_config_cache = None
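
Note: paramiko is treated as an optional dependency here. If it cannot be imported, a stub `SSHClient` takes its place and only raises when an SSH path is actually used, so local and zip-based reads keep working. A small illustration (the host, key file and paths are made up):

    from qulab.executor.storage import get_heads

    heads = get_heads("./data")   # local directory: works without paramiko
    # get_heads("ssh 192.168.1.10 22 qulab /home/qulab/.ssh/id_rsa /home/qulab/data")
    # would raise ImportError from the stub SSHClient if paramiko is missing.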
@@ -36,6 +53,20 @@ class Report():
     config_path: Path | None = field(default=None, repr=False)
     script_path: Path | None = field(default=None, repr=False)
 
+    def __getstate__(self):
+        state = self.__dict__.copy()
+        state.pop('base_path')
+        for k in ['path', 'previous_path', 'config_path', 'script_path']:
+            if state[k] is not None:
+                state[k] = str(state[k])
+        return state
+
+    def __setstate__(self, state):
+        for k in ['path', 'previous_path', 'config_path', 'script_path']:
+            if state[k] is not None:
+                state[k] = Path(state[k])
+        self.__dict__.update(state)
+
     @property
     def previous(self):
         if self.previous_path is not None and self.base_path is not None:
@@ -84,7 +115,20 @@ class Report():
     @property
     def script(self):
         if self.script_path is not None and self.base_path is not None:
-            return load_item(self.script_path, self.base_path)
+            source = load_item(self.script_path, self.base_path)
+            if isinstance(source, str):
+                return source
+            else:
+                from .template import inject_mapping
+                return inject_mapping(*source)[0]
+        else:
+            return None
+
+    @property
+    def template_source(self):
+        if self.script_path is not None and self.base_path is not None:
+            source = load_item(self.script_path, self.base_path)
+            return source
         else:
             return None
 
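Note: `__getstate__`/`__setstate__` make `Report` objects picklable independently of where they were created: `base_path` is dropped and the `Path` fields travel as plain strings, then come back as `Path` objects. A rough round-trip sketch (field values are illustrative, and it assumes the remaining `Report` fields have defaults, as the constructor call in `find_config_key_history` suggests):

    import pickle
    from pathlib import Path
    from qulab.executor.storage import Report

    r = Report(workflow='demo', in_spec=True, bad_data=False)
    r.path = Path('aa/bb/cc')                  # illustrative report path
    restored = pickle.loads(pickle.dumps(r))
    assert isinstance(restored.path, Path)     # restored as a Path again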
@@ -97,106 +141,6 @@ def random_path(base: Path) -> Path:
     return path
 
 
-def save_config_key_history(key: str, report: Report,
-                            base_path: str | Path) -> int:
-    global __current_config_cache
-    base_path = Path(base_path) / 'state'
-    base_path.mkdir(parents=True, exist_ok=True)
-
-    if __current_config_cache is None:
-        if (base_path / 'parameters.pkl').exists():
-            with open(base_path / 'parameters.pkl', 'rb') as f:
-                __current_config_cache = pickle.load(f)
-        else:
-            __current_config_cache = {}
-
-    __current_config_cache[
-        key] = report.data, report.calibrated_time, report.checked_time
-
-    with open(base_path / 'parameters.pkl', 'wb') as f:
-        pickle.dump(__current_config_cache, f)
-    return 0
-
-
-def find_config_key_history(key: str, base_path: str | Path) -> Report | None:
-    global __current_config_cache
-    base_path = Path(base_path) / 'state'
-    if __current_config_cache is None:
-        if (base_path / 'parameters.pkl').exists():
-            with open(base_path / 'parameters.pkl', 'rb') as f:
-                __current_config_cache = pickle.load(f)
-        else:
-            __current_config_cache = {}
-
-    if key in __current_config_cache:
-        value, calibrated_time, checked_time = __current_config_cache.get(
-            key, None)
-        report = Report(
-            workflow=f'cfg:{key}',
-            bad_data=False,
-            in_spec=True,
-            fully_calibrated=True,
-            parameters={key: value},
-            data=value,
-            calibrated_time=calibrated_time,
-            checked_time=checked_time,
-        )
-        return report
-    return None
-
-
-def save_report(workflow: str,
-                report: Report,
-                base_path: str | Path,
-                overwrite: bool = False,
-                refresh_heads: bool = True) -> int:
-    if workflow.startswith("cfg:"):
-        return save_config_key_history(workflow[4:], report, base_path)
-
-    logger.debug(
-        f'Saving report for "{workflow}", {report.in_spec=}, {report.bad_data=}, {report.fully_calibrated=}'
-    )
-    base_path = Path(base_path)
-    try:
-        buf = lzma.compress(pickle.dumps(report))
-    except:
-        raise ValueError(f"Can't pickle report for {workflow}")
-    if overwrite:
-        path = report.path
-        if path is None:
-            raise ValueError("Report path is None, can't overwrite.")
-        with open(base_path / 'reports' / path, "rb") as f:
-            index = int.from_bytes(f.read(8), 'big')
-        report.index = index
-    else:
-        path = random_path(base_path / 'reports')
-        (base_path / 'reports' / path).parent.mkdir(parents=True,
-                                                    exist_ok=True)
-        report.path = path
-        report.index = create_index("report",
-                                    base_path,
-                                    context=str(path),
-                                    width=35)
-    with open(base_path / 'reports' / path, "wb") as f:
-        f.write(report.index.to_bytes(8, 'big'))
-        f.write(buf)
-    if refresh_heads:
-        set_head(workflow, path, base_path)
-    return report.index
-
-
-def load_report(path: str | Path, base_path: str | Path) -> Report | None:
-    base_path = Path(base_path)
-    path = base_path / 'reports' / path
-
-    with open(base_path / 'reports' / path, "rb") as f:
-        index = int.from_bytes(f.read(8), 'big')
-        report = pickle.loads(lzma.decompress(f.read()))
-        report.base_path = base_path
-        report.index = index
-        return report
-
-
 def find_report(
     workflow: str, base_path: str | Path = get_config_value("data", Path)
 ) -> Report | None:
@@ -236,6 +180,26 @@ def revoke_report(workflow: str, report: Report | None, base_path: str | Path):
                 refresh_heads=True)
 
 
+def get_report_by_index(
+        index: int, base_path: str | Path = get_config_value("data", Path)
+) -> Report | None:
+    try:
+        path = query_index("report", base_path, index)
+        return load_report(path, base_path)
+    except:
+        raise
+        return None
+
+
+def get_head(workflow: str, base_path: str | Path) -> Path | None:
+    return get_heads(base_path).get(workflow, None)
+
+
+#########################################################################
+## Basic Write API ##
+#########################################################################
+
+
 def set_head(workflow: str, path: Path, base_path: str | Path):
     base_path = Path(base_path)
     base_path.mkdir(parents=True, exist_ok=True)
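
Note: `get_report_by_index` resolves an integer report index to its storage path through the index files and then loads the report. A short usage sketch (the index value and data directory are illustrative):

    from qulab.executor.storage import get_report_by_index

    report = get_report_by_index(42, "./data")
    if report is not None:
        print(report.workflow, report.index, report.calibrated_time)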
@@ -249,24 +213,44 @@ def set_head(workflow: str, path: Path, base_path: str | Path):
     pickle.dump(heads, f)
 
 
-def get_head(workflow: str, base_path: str | Path) -> Path | None:
-    base_path = Path(base_path)
-    try:
-        with open(base_path / "heads", "rb") as f:
-            heads = pickle.load(f)
-        return heads[workflow]
-    except:
-        return None
-
+def save_report(workflow: str,
+                report: Report,
+                base_path: str | Path,
+                overwrite: bool = False,
+                refresh_heads: bool = True) -> int:
+    if workflow.startswith("cfg:"):
+        return save_config_key_history(workflow[4:], report, base_path)
 
-def get_heads(base_path: str | Path) -> Path | None:
+    logger.debug(
+        f'Saving report for "{workflow}", {report.in_spec=}, {report.bad_data=}, {report.fully_calibrated=}'
+    )
     base_path = Path(base_path)
     try:
-        with open(base_path / "heads", "rb") as f:
-            heads = pickle.load(f)
-        return heads
+        buf = lzma.compress(pickle.dumps(report))
     except:
-        return {}
+        raise ValueError(f"Can't pickle report for {workflow}")
+    if overwrite:
+        path = report.path
+        if path is None:
+            raise ValueError("Report path is None, can't overwrite.")
+        with open(base_path / 'reports' / path, "rb") as f:
+            index = int.from_bytes(f.read(8), 'big')
+        report.index = index
+    else:
+        path = random_path(base_path / 'reports')
+        (base_path / 'reports' / path).parent.mkdir(parents=True,
+                                                    exist_ok=True)
+        report.path = path
+        report.index = create_index("report",
+                                    base_path,
+                                    context=str(path),
+                                    width=35)
+    with open(base_path / 'reports' / path, "wb") as f:
+        f.write(report.index.to_bytes(8, 'big'))
+        f.write(buf)
+    if refresh_heads:
+        set_head(workflow, path, base_path)
+    return report.index
 
 
 def create_index(name: str,
@@ -302,27 +286,6 @@ def create_index(name: str,
     return index
 
 
-@lru_cache(maxsize=4096)
-def query_index(name: str, base_path: str | Path, index: int):
-    path = Path(base_path) / "index" / name
-    width = int(path.with_suffix('.width').read_text())
-
-    with path.with_suffix('.idx').open("r") as f:
-        f.seek(index * (width + 1))
-        context = f.read(width)
-    return context.rstrip()
-
-
-def get_report_by_index(
-        index: int, base_path: str | Path = get_config_value("data", Path)
-) -> Report | None:
-    try:
-        path = query_index("report", base_path, index)
-        return load_report(path, base_path)
-    except:
-        return None
-
-
 def save_item(item, data_path):
     salt = 0
     buf = pickle.dumps(item)
@@ -345,10 +308,244 @@ def save_item(item, data_path):
     return str(item_id)
 
 
+def save_config_key_history(key: str, report: Report,
+                            base_path: str | Path) -> int:
+    global __current_config_cache
+    base_path = Path(base_path) / 'state'
+    base_path.mkdir(parents=True, exist_ok=True)
+
+    if __current_config_cache is None:
+        if (base_path / 'parameters.pkl').exists():
+            with open(base_path / 'parameters.pkl', 'rb') as f:
+                __current_config_cache = pickle.load(f)
+        else:
+            __current_config_cache = {}
+
+    __current_config_cache[
+        key] = report.data, report.calibrated_time, report.checked_time
+
+    with open(base_path / 'parameters.pkl', 'wb') as f:
+        pickle.dump(__current_config_cache, f)
+    return 0
+
+
+#########################################################################
+## Basic Read API ##
+#########################################################################
+
+
+def load_report(path: str | Path, base_path: str | Path) -> Report | None:
+    if isinstance(base_path, str) and base_path.startswith('ssh '):
+        with SSHClient() as client:
+            cfg, base_path = _pase_ssh_config(base_path[4:])
+            client.load_system_host_keys()
+            client.connect(**cfg)
+            return load_report_from_scp(path, base_path, client)
+
+    base_path = Path(base_path)
+    if zipfile.is_zipfile(base_path):
+        return load_report_from_zipfile(path, base_path)
+
+    path = base_path / 'reports' / path
+
+    with open(base_path / 'reports' / path, "rb") as f:
+        index = int.from_bytes(f.read(8), 'big')
+        report = pickle.loads(lzma.decompress(f.read()))
+        report.base_path = base_path
+        report.index = index
+        return report
+
+
+def get_heads(base_path: str | Path) -> Path | None:
+    if isinstance(base_path, str) and base_path.startswith('ssh '):
+        with SSHClient() as client:
+            cfg, base_path = _pase_ssh_config(base_path[4:])
+            client.load_system_host_keys()
+            client.connect(**cfg)
+            return get_heads_from_scp(base_path, client)
+
+    base_path = Path(base_path)
+    if zipfile.is_zipfile(base_path):
+        return get_heads_from_zipfile(base_path)
+    try:
+        with open(base_path / "heads", "rb") as f:
+            heads = pickle.load(f)
+        return heads
+    except:
+        return {}
+
+
+@lru_cache(maxsize=4096)
+def query_index(name: str, base_path: str | Path, index: int):
+    if isinstance(base_path, str) and base_path.startswith('ssh '):
+        with SSHClient() as client:
+            cfg, base_path = _pase_ssh_config(base_path[4:])
+            client.load_system_host_keys()
+            client.connect(**cfg)
+            return query_index_from_scp(name, base_path, client, index)
+
+    base_path = Path(base_path)
+    if zipfile.is_zipfile(base_path):
+        return query_index_from_zipfile(name, base_path, index)
+    path = Path(base_path) / "index" / name
+    width = int(path.with_suffix('.width').read_text())
+
+    with path.with_suffix('.idx').open("r") as f:
+        f.seek(index * (width + 1))
+        context = f.read(width)
+    return context.rstrip()
+
+
 @lru_cache(maxsize=4096)
-def load_item(id, data_path):
-    path = Path(data_path) / 'items' / id
-    with open(path, 'rb') as f:
-        buf = f.read()
-    cfg = pickle.loads(lzma.decompress(buf))
-    return cfg
+def load_item(id, base_path):
+    if isinstance(base_path, str) and base_path.startswith('ssh '):
+        with SSHClient() as client:
+            cfg, base_path = _pase_ssh_config(base_path[4:])
+            client.load_system_host_keys()
+            client.connect(**cfg)
+            buf = load_item_buf_from_scp(id, base_path, client)
+    else:
+        base_path = Path(base_path)
+        if zipfile.is_zipfile(base_path):
+            buf = load_item_buf_from_zipfile(id, base_path)
+        else:
+            path = Path(base_path) / 'items' / id
+            with open(path, 'rb') as f:
+                buf = f.read()
+    item = pickle.loads(lzma.decompress(buf))
+    return item
+
+
+def find_config_key_history(key: str, base_path: str | Path) -> Report | None:
+    global __current_config_cache
+    base_path = Path(base_path) / 'state'
+    if __current_config_cache is None:
+        if (base_path / 'parameters.pkl').exists():
+            with open(base_path / 'parameters.pkl', 'rb') as f:
+                __current_config_cache = pickle.load(f)
+        else:
+            __current_config_cache = {}
+
+    if key in __current_config_cache:
+        value, calibrated_time, checked_time = __current_config_cache.get(
+            key, None)
+        report = Report(
+            workflow=f'cfg:{key}',
+            bad_data=False,
+            in_spec=True,
+            fully_calibrated=True,
+            parameters={key: value},
+            data=value,
+            calibrated_time=calibrated_time,
+            checked_time=checked_time,
+        )
+        return report
+    return None
+
+
+#########################################################################
+## Zipfile support ##
+#########################################################################
+
+
+def load_report_from_zipfile(path: str | Path,
+                             base_path: str | Path) -> Report | None:
+    path = Path(path)
+    with zipfile.ZipFile(base_path) as zf:
+        path = '/'.join(path.parts)
+        with zf.open(f"{base_path.stem}/reports/{path}") as f:
+            index = int.from_bytes(f.read(8), 'big')
+            report = pickle.loads(lzma.decompress(f.read()))
+            report.base_path = base_path
+            report.index = index
+            return report
+
+
+def get_heads_from_zipfile(base_path: str | Path) -> Path | None:
+    with zipfile.ZipFile(base_path) as zf:
+        with zf.open(f"{base_path.stem}/heads") as f:
+            heads = pickle.load(f)
+    return heads
+
+
+def query_index_from_zipfile(name: str, base_path: str | Path, index: int):
+    with zipfile.ZipFile(base_path) as zf:
+        with zf.open(f"{base_path.stem}/index/{name}.width") as f:
+            width = int(f.read().decode())
+        with zf.open(f"{base_path.stem}/index/{name}.idx") as f:
+            f.seek(index * (width + 1))
+            context = f.read(width).decode()
+    return context.rstrip()
+
+
+def load_item_buf_from_zipfile(id, base_path):
+    with zipfile.ZipFile(base_path) as zf:
+        with zf.open(f"{base_path.stem}/items/{id}") as f:
+            return f.read()
+
+
+#########################################################################
+## SCP support ##
+#########################################################################
+
+
+def _pase_ssh_config(config: str):
+    config = config.split()
+    base_path = ' '.join(config[4:])
+    return {
+        'hostname': config[0],
+        'port': int(config[1]),
+        'username': config[2],
+        'key_filename': config[3]
+    }, Path(base_path)
+
+
+def load_report_from_scp(path: str | Path, base_path: Path,
+                         client: SSHClient) -> Report:
+    try:
+        path = Path(path)
+        with client.open_sftp() as sftp:
+            with sftp.open(str(Path(base_path) / 'reports' / path), 'rb') as f:
+                index = int.from_bytes(f.read(8), 'big')
+                report = pickle.loads(lzma.decompress(f.read()))
+                report.base_path = path
+                report.index = index
+                return report
+    except SSHException:
+        raise ValueError(f"Can't load report from {path}")
+
+
+def get_heads_from_scp(base_path: Path, client: SSHClient) -> Path | None:
+    try:
+        with client.open_sftp() as sftp:
+            with sftp.open(str(Path(base_path) / 'heads'), 'rb') as f:
+                heads = pickle.load(f)
+        return heads
+    except SSHException:
+        return None
+
+
+def query_index_from_scp(name: str, base_path: Path, client: SSHClient,
+                         index: int):
+    try:
+        with client.open_sftp() as sftp:
+            s = str(Path(base_path) / 'index' / f'{name}.width')
+            with sftp.open(s, 'rb') as f:
+                width = int(f.read().decode())
+            with sftp.open(str(base_path / 'index' / f'{name}.idx'),
+                           'rb') as f:
+                f.seek(index * (width + 1))
+                context = f.read(width).decode()
+        return context.rstrip()
+    except SSHException:
+        return None
+
+
+def load_item_buf_from_scp(id: str, base_path: Path, client: SSHClient):
+    try:
+        with client.open_sftp() as sftp:
+            with sftp.open(str(Path(base_path) / 'items' / str(id)),
+                           'rb') as f:
+                return f.read()
+    except SSHException:
+        return None
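
Note: taken together, the read API now accepts three kinds of `base_path`: a local data directory, a zip archive of that directory (with the directory name as the top-level folder inside the archive), or a string of the form `ssh <host> <port> <user> <key_file> <remote_path>` as parsed by `_pase_ssh_config`. A usage sketch (hosts, users and paths are made up):

    from qulab.executor.storage import get_heads, load_report

    for base in [
            "./data",                                            # local directory
            "./data.zip",                                        # zipped data directory
            "ssh 192.168.1.10 22 qulab /home/qulab/.ssh/id_rsa /home/qulab/data",
    ]:
        heads = get_heads(base) or {}           # workflow name -> path of latest report
        for workflow, path in heads.items():
            report = load_report(path, base)
            print(workflow, report.index)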
qulab/fun.cp311-win_amd64.pyd CHANGED
Binary file (contents not shown)
qulab/utils.py CHANGED
@@ -42,7 +42,7 @@ def _unix_detach_with_tmux_or_screen(executable_path):
         "-d",
         "-s",
         session_name,
-        safe_path + " ; tmux wait-for -S finished",  # wait for the command to finish
+        executable_path + " ; tmux wait-for -S finished",  # wait for the command to finish
         ";",
         "tmux",
         "wait-for",
@@ -55,7 +55,7 @@ def _unix_detach_with_tmux_or_screen(executable_path):
 
     # try screen
     elif _check_command_exists("screen"):
-        command = ["screen", "-dmS", session_name, safe_path]
+        command = ["screen", "-dmS", session_name, executable_path]
         subprocess.Popen(command, start_new_session=True)
         click.echo(f"已启动 screen 会话: {session_name}")
         click.echo(f"你可以使用 `screen -r {session_name}` 来查看输出")
@@ -66,18 +66,18 @@ def _unix_detach_with_tmux_or_screen(executable_path):
 
 def run_detached_with_terminal(executable_path):
     """Fall back to a variant that opens a terminal window."""
-    safe_path = shlex.quote(executable_path)
     if sys.platform == 'win32':
         _windows_start(executable_path)
     elif sys.platform == 'darwin':
-        script = f'tell app "Terminal" to do script "{safe_path}"'
+        script = f'tell app "Terminal" to do script "{executable_path}"'
         subprocess.Popen(["osascript", "-e", script], start_new_session=True)
     else:
         try:
-            subprocess.Popen(["gnome-terminal", "--", "sh", "-c", safe_path],
-                             start_new_session=True)
+            subprocess.Popen(
+                ["gnome-terminal", "--", "sh", "-c", executable_path],
+                start_new_session=True)
         except FileNotFoundError:
-            subprocess.Popen(["xterm", "-e", safe_path],
+            subprocess.Popen(["xterm", "-e", executable_path],
                              start_new_session=True)
 
 
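Note: for reference, the detach pattern used above starts the command in a detached tmux session and then blocks on a `tmux wait-for` channel until the command signals completion. A simplified sketch of that pattern (the session name and executable are made up, and the two tmux calls are issued separately rather than chained with `;` as in the code above):

    import subprocess

    session_name = "qulab_server"
    executable_path = "/opt/qulab/start.sh"

    # Run the command in a detached session; signal 'finished' when it exits.
    subprocess.run([
        "tmux", "new-session", "-d", "-s", session_name,
        executable_path + " ; tmux wait-for -S finished",
    ])
    # Block here until the 'finished' channel is signalled.
    subprocess.run(["tmux", "wait-for", "finished"])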
qulab/version.py CHANGED
@@ -1 +1 @@
-__version__ = "2.7.18"
+__version__ = "2.8.0"
qulab-2.8.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: QuLab
-Version: 2.7.18
+Version: 2.8.0
 Summary: contral instruments and manage data
 Author-email: feihoo87 <feihoo87@gmail.com>
 Maintainer-email: feihoo87 <feihoo87@gmail.com>
qulab-2.8.0.dist-info/RECORD CHANGED
@@ -1,18 +1,18 @@
 qulab/__init__.py,sha256=RZme5maBSMZpP6ckXymqZpo2sRYttwEpTYCIzIvys1c,292
 qulab/__main__.py,sha256=FL4YsGZL1jEtmcPc5WbleArzhOHLMsWl7OH3O-1d1ss,72
 qulab/dicttree.py,sha256=ZoSJVWK4VMqfzj42gPb_n5RqLlM6K1Me0WmLIfLEYf8,14195
-qulab/fun.cp311-win_amd64.pyd,sha256=6RxYFz6SnyWLym6Rrq0e6W5vl3AaPXgwzyb6YjDzc80,31744
+qulab/fun.cp311-win_amd64.pyd,sha256=3uplLogKifP8ydqTn5YcrV66KWQ7C9pDdbS23CrkVlY,31744
 qulab/typing.py,sha256=PRtwbCHWY2ROKK8GHq4Bo8llXrIGo6xC73DrQf7S9os,71
-qulab/utils.py,sha256=kSy_tQRLDdlMwk7XhGhg75JadNHbDkau5vYcfOlJG_4,3088
-qulab/version.py,sha256=xLneP_cqIForMj5AICyQrp13un8yGC7zOY4cjyKywXk,22
+qulab/utils.py,sha256=65N2Xj7kqRsQ4epoLNY6tL-i5ts6Wk8YuJYee3Te6zI,3077
+qulab/version.py,sha256=HoslYmXXy3WAQMEXd5jNDwMD2rL7bVPI5DzutSN5GNc,21
 qulab/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 qulab/cli/commands.py,sha256=6xd2eYw32k1NmfAuYSu__1kaP12Oz1QVqwbkYXdWno4,588
 qulab/cli/config.py,sha256=7h3k0K8FYHhI6LVWt8BoDdKrX2ApFDBAUAUuXhHwst4,3799
 qulab/executor/__init__.py,sha256=LosPzOMaljSZY1thy_Fxtbrgq7uubJszMABEB7oM7tU,101
-qulab/executor/cli.py,sha256=z8W1RivKdABQSOGy2viNUvG73QvOBpE9gSKjw45vSVA,9794
-qulab/executor/load.py,sha256=omD2aklKZnHOosahI-Vs6sQkGdn4U9PgnDIAbrEOhi0,17786
+qulab/executor/cli.py,sha256=8d-8bRWZ5lmsMtjASsl1zu1rV-syeAESMNVthvIQxlo,10018
+qulab/executor/load.py,sha256=YndvzagvWR8Sg6WHZ-gP-Of0FrFOyh_E_a3VXsjDf1Q,17502
 qulab/executor/schedule.py,sha256=0BV5LGxhqdIlGwW6-o5_5mljAtdtL1La8EDNBFi8pzU,18585
-qulab/executor/storage.py,sha256=gI6g28BmKKEZ_Pl-hFwvpiOj3mF8Su-yjj3hfMXs1VY,11630
+qulab/executor/storage.py,sha256=2YWUxYis8QlYLhzvvwoS2Wmyb9UYaojEh6X20ICePWI,19014
 qulab/executor/template.py,sha256=bKMoOBPfa3XMgTfGHQK6pDTswH1vcIjnopaWE3UKpP0,7726
 qulab/executor/transform.py,sha256=BDx0c4nqTHMAOLVqju0Ydd91uxNm6EpVIfssjZse0bI,2284
 qulab/executor/utils.py,sha256=l_b0y2kMwYKyyXeFtoblPYwKNU-wiFQ9PMo9QlWl9wE,6213
@@ -97,9 +97,9 @@ qulab/visualization/plot_seq.py,sha256=Uo1-dB1YE9IN_A9tuaOs9ZG3S5dKDQ_l98iD2Wbxp
 qulab/visualization/qdat.py,sha256=HubXFu4nfcA7iUzghJGle1C86G6221hicLR0b-GqhKQ,5887
 qulab/visualization/rot3d.py,sha256=jGHJcqj1lEWBUV-W4GUGONGacqjrYvuFoFCwPse5h1Y,757
 qulab/visualization/widgets.py,sha256=HcYwdhDtLreJiYaZuN3LfofjJmZcLwjMfP5aasebgDo,3266
-qulab-2.7.18.dist-info/LICENSE,sha256=b4NRQ-GFVpJMT7RuExW3NwhfbrYsX7AcdB7Gudok-fs,1086
-qulab-2.7.18.dist-info/METADATA,sha256=ftNypm4S-GR5LeN2SHGHdrAlv0dvHksDaQH_XwPoFo8,3804
-qulab-2.7.18.dist-info/WHEEL,sha256=KJXh1V8Kwbv4sLWZhe-NnXz7pUnJH10x-MhKeMsQYvA,101
-qulab-2.7.18.dist-info/entry_points.txt,sha256=b0v1GXOwmxY-nCCsPN_rHZZvY9CtTbWqrGj8u1m8yHo,45
-qulab-2.7.18.dist-info/top_level.txt,sha256=3T886LbAsbvjonu_TDdmgxKYUn939BVTRPxPl9r4cEg,6
-qulab-2.7.18.dist-info/RECORD,,
+qulab-2.8.0.dist-info/LICENSE,sha256=b4NRQ-GFVpJMT7RuExW3NwhfbrYsX7AcdB7Gudok-fs,1086
+qulab-2.8.0.dist-info/METADATA,sha256=7GHqBOgAMQ_zxJaGPbsc6AbwGMQ7IObgjC9Dh5k7ggM,3803
+qulab-2.8.0.dist-info/WHEEL,sha256=sth_4wOlmB4B7KwCBgmeM0_a5vgcRqkZDHCDdXAQZRc,101
+qulab-2.8.0.dist-info/entry_points.txt,sha256=b0v1GXOwmxY-nCCsPN_rHZZvY9CtTbWqrGj8u1m8yHo,45
+qulab-2.8.0.dist-info/top_level.txt,sha256=3T886LbAsbvjonu_TDdmgxKYUn939BVTRPxPl9r4cEg,6
+qulab-2.8.0.dist-info/RECORD,,
qulab-2.8.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (76.0.0)
+Generator: setuptools (76.1.0)
 Root-Is-Purelib: false
 Tag: cp311-cp311-win_amd64