QuLab 2.5.4__tar.gz → 2.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qulab-2.5.4 → qulab-2.6.0}/PKG-INFO +1 -1
- {qulab-2.5.4 → qulab-2.6.0}/QuLab.egg-info/PKG-INFO +1 -1
- {qulab-2.5.4 → qulab-2.6.0}/qulab/executor/cli.py +4 -4
- {qulab-2.5.4 → qulab-2.6.0}/qulab/executor/load.py +95 -3
- {qulab-2.5.4 → qulab-2.6.0}/qulab/executor/schedule.py +51 -19
- {qulab-2.5.4 → qulab-2.6.0}/qulab/executor/storage.py +41 -21
- qulab-2.6.0/qulab/executor/transform.py +122 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/executor/utils.py +21 -0
- qulab-2.6.0/qulab/version.py +1 -0
- qulab-2.5.4/qulab/executor/transform.py +0 -55
- qulab-2.5.4/qulab/version.py +0 -1
- {qulab-2.5.4 → qulab-2.6.0}/LICENSE +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/MANIFEST.in +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/QuLab.egg-info/SOURCES.txt +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/QuLab.egg-info/dependency_links.txt +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/QuLab.egg-info/entry_points.txt +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/QuLab.egg-info/requires.txt +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/QuLab.egg-info/top_level.txt +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/README.md +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/pyproject.toml +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/__main__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/cli/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/cli/commands.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/cli/config.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/dicttree.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/executor/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/__main__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/config.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/dataset.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/event_queue.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/mainwindow.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/monitor.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/ploter.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/qt_compat.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/monitor/toolbar.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/curd.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/expression.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/models.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/optimize.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/query.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/record.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/scan.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/server.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/space.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/scan/utils.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/__main__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/backend/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/backend/redis.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/base_dataset.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/chunk.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/dataset.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/file.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/base.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/config.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/file.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/ipy.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/models.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/record.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/report.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/models/tag.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/storage/storage.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/chat.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/device/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/device/basedevice.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/device/loader.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/device/utils.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/drivers/FakeInstrument.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/drivers/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/ipy_events.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/net/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/net/bencoder.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/net/cli.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/net/dhcp.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/net/dhcpd.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/net/kad.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/net/kcp.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/net/nginx.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/progress.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/client.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/exceptions.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/msgpack.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/msgpack.pyi +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/router.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/rpc.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/serialize.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/server.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/socket.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/utils.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/worker.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/sys/rpc/zmq_socket.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/typing.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/__init__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/__main__.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/_autoplot.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/plot_circ.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/plot_layout.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/plot_seq.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/qdat.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/rot3d.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/qulab/visualization/widgets.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/setup.cfg +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/setup.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/src/qulab.h +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/tests/test_kad.py +0 -0
- {qulab-2.5.4 → qulab-2.6.0}/tests/test_scan.py +0 -0
{qulab-2.5.4 → qulab-2.6.0}/qulab/executor/cli.py

@@ -102,7 +102,7 @@ def set(key, value, api):
     from . import transform
     if api is not None:
         api = importlib.import_module(api)
-        set_config_api(api.query_config, api.update_config)
+        set_config_api(api.query_config, api.update_config, api.export_config)
     try:
         value = eval(value)
     except:

@@ -124,7 +124,7 @@ def get(key, api):
     from . import transform
     if api is not None:
         api = importlib.import_module(api)
-        set_config_api(api.query_config, api.update_config)
+        set_config_api(api.query_config, api.update_config, api.export_config)
     click.echo(transform.query_config(key))


@@ -147,7 +147,7 @@ def run(workflow, code, data, api, plot, no_dependents, update):
     )
     if api is not None:
         api = importlib.import_module(api)
-        set_config_api(api.query_config, api.update_config)
+        set_config_api(api.query_config, api.update_config, api.export_config)
     if code is None:
         code = Path.cwd()
     if data is None:

@@ -197,7 +197,7 @@ def maintain(workflow, code, data, api, plot):
     )
     if api is not None:
         api = importlib.import_module(api)
-        set_config_api(api.query_config, api.update_config)
+        set_config_api(api.query_config, api.update_config, api.export_config)
     if code is None:
         code = Path.cwd()
     if data is None:
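Every CLI entry point now hands a third callable, export_config, to set_config_api, so any module passed via the CLI's api option must expose an export function alongside query and update. A minimal sketch of such a module (the module name and the dict-backed store are illustrative, not part of QuLab):

# my_config_api.py -- hypothetical module to pass as the CLI's `api` option.
_store = {}

def query_config(name, default=None):
    # Return the value stored under a single key.
    return _store.get(name, default)

def update_config(updates: dict):
    # Merge a dict of key/value updates into the store.
    _store.update(updates)

def export_config() -> dict:
    # Required since 2.6.0: return a full snapshot of the configuration,
    # which the executor saves as a content-addressed config file.
    return dict(_store)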
{qulab-2.5.4 → qulab-2.6.0}/qulab/executor/load.py

@@ -11,6 +11,7 @@ import warnings
 from importlib.util import module_from_spec, spec_from_file_location
 from pathlib import Path
 from types import ModuleType
+from typing import Any

 from loguru import logger

@@ -107,7 +108,8 @@ def verify_calibrate_method(module: WorkflowType):
 def verify_check_method(module: WorkflowType):
     if not hasattr(module, 'check'):
         warnings.warn(
-            f"Workflow {module.__file__} does not have 'check' function"
+            f"Workflow {module.__file__} does not have 'check' function, it will be set to 'calibrate' function"
+        )
     else:
         if not can_call_without_args(module.check):
             raise AttributeError(

@@ -120,6 +122,93 @@ def verify_check_method(module: WorkflowType):
             )


+def verify_dependence_key(workflow: str | tuple[str, dict[str, Any]]
+                          | tuple[str, str, dict[str, Any]]):
+    if isinstance(workflow, str):
+        return
+    if not isinstance(workflow, tuple) or len(workflow) not in [2, 3]:
+        raise ValueError(f"Invalid workflow: {workflow}")
+
+    if len(workflow) == 2:
+        file_name, mapping = workflow
+        if not Path(file_name).exists():
+            raise FileNotFoundError(f"File not found: {file_name}")
+    elif len(workflow) == 3:
+        template_path, target_path, mapping = workflow
+        if not Path(template_path).exists():
+            raise FileNotFoundError(f"File not found: {template_path}")
+        if not isinstance(target_path, (Path, str)) or target_path == '':
+            raise ValueError(f"Invalid target_path: {target_path}")
+        if not isinstance(target_path, (Path, str)):
+            raise ValueError(f"Invalid target_path: {target_path}")
+        if Path(target_path).suffix != '.py':
+            raise ValueError(
+                f"Invalid target_path: {target_path}. Only .py file is supported"
+            )
+    else:
+        raise ValueError(f"Invalid workflow: {workflow}")
+
+    if not isinstance(mapping, dict):
+        raise ValueError(f"Invalid mapping: {mapping}")
+
+    for key, value in mapping.items():
+        if not isinstance(key, str):
+            raise ValueError(
+                f"Invalid key: {key}, should be str type and valid identifier")
+        if not key.isidentifier():
+            raise ValueError(f"Invalid key: {key}, should be identifier")
+        try:
+            pickle.dumps(value)
+        except Exception as e:
+            raise ValueError(
+                f"Invalid value: {key}: {value}, should be pickleable") from e
+    return
+
+
+def verify_depends(module: WorkflowType):
+    if not hasattr(module, 'depends'):
+        return
+
+    deps = []
+
+    if callable(module.depends):
+        if not can_call_without_args(module.depends):
+            raise AttributeError(
+                f"Workflow {module.__file__} 'depends' function should not have any parameters"
+            )
+        deps = list(module.depends())
+    elif isinstance(module.depends, (list, tuple)):
+        deps = module.depends
+    else:
+        raise AttributeError(
+            f"Workflow {module.__file__} 'depends' should be a callable or a list"
+        )
+    for workflow in deps:
+        verify_dependence_key(workflow)
+
+
+def verify_entries(module: WorkflowType):
+    if not hasattr(module, 'entries'):
+        return
+
+    deps = []
+
+    if callable(module.entries):
+        if not can_call_without_args(module.entries):
+            raise AttributeError(
+                f"Workflow {module.__file__} 'entries' function should not have any parameters"
+            )
+        deps = list(module.entries())
+    elif isinstance(module.entries, (list, tuple)):
+        deps = module.entries
+    else:
+        raise AttributeError(
+            f"Workflow {module.__file__} 'entries' should be a callable or a list"
+        )
+    for workflow in deps:
+        verify_dependence_key(workflow)
+
+
 def is_workflow(module: ModuleType) -> bool:
     try:
         verify_calibrate_method(module)

@@ -218,14 +307,15 @@ def load_workflow_from_file(file_name: str,
     module.__mtime__ = (base_path / path).stat().st_mtime

     if hasattr(module, 'entries'):
+        verify_entries(module)
         return module

     if not hasattr(module, '__timeout__'):
         module.__timeout__ = None

     if not hasattr(module, 'depends'):
-        module.depends = lambda: [
-
+        module.depends = lambda: []
+    verify_depends(module)
     verify_calibrate_method(module)
     verify_check_method(module)

@@ -415,5 +505,7 @@ def make_graph(workflow: WorkflowType, graph: dict, code_path: str | Path):
     for w in get_dependents(workflow, code_path):
         graph[workflow.__workflow_id__].append(w.__workflow_id__)
         make_graph(w, graph=graph, code_path=code_path)
+    if graph[workflow.__workflow_id__] == []:
+        del graph[workflow.__workflow_id__]

     return graph
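verify_dependence_key accepts three shapes for each entry returned by a workflow's depends or entries. A sketch of declarations that pass the new checks (all file names and the mapping are examples):

# Hypothetical workflow module; file names and mapping values are examples.
def depends():
    return [
        'rabi.py',                          # plain workflow name
        ('spectrum.py', {'qubit': 'Q1'}),   # (file_name, mapping): keys must be
                                            # identifiers, values pickleable
        ('templates/ramsey.py',             # (template, target, mapping):
         'generated/ramsey_Q1.py',          # target must be a .py path
         {'qubit': 'Q1'}),
    ]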
{qulab-2.5.4 → qulab-2.6.0}/qulab/executor/schedule.py

@@ -1,4 +1,5 @@
 import functools
+import pickle
 import uuid
 from datetime import datetime, timedelta
 from pathlib import Path

@@ -8,13 +9,34 @@ from loguru import logger
 from .load import WorkflowType, get_dependents
 from .storage import (Result, find_result, renew_result, revoke_result,
                       save_result)
-from .transform import update_parameters
+from .transform import current_config, update_parameters


 class CalibrationFailedError(Exception):
     pass


+def is_pickleable(obj) -> bool:
+    try:
+        pickle.dumps(obj)
+        return True
+    except:
+        return False
+
+
+def veryfy_analyzed_result(result: Result, script: str, method: str):
+    if not isinstance(result, Result):
+        raise TypeError(f'"{script}" : "{method}" must return a Result object')
+    if not is_pickleable(result.parameters):
+        raise TypeError(
+            f'"{script}" : "{method}" return not pickleable data in .parameters'
+        )
+    if not is_pickleable(result.other_infomation):
+        raise TypeError(
+            f'"{script}" : "{method}" return not pickleable data in .other_infomation'
+        )
+
+
 def check_state(workflow: WorkflowType, code_path: str | Path,
                 state_path: str | Path) -> bool:
     """

@@ -73,17 +95,11 @@ def call_analyzer(node,
                   plot=False) -> Result:
     if check:
         result = node.check_analyze(result, history=history)
-
-        raise TypeError(
-            f'"{node.__workflow_id__}" : "check_analyze" must return a Result object'
-        )
+        veryfy_analyzed_result(result, node.__workflow_id__, "check_analyze")
         result.fully_calibrated = False
     else:
         result = node.analyze(result, history=history)
-
-        raise TypeError(
-            f'"{node.__workflow_id__}" : "analyze" must return a Result object'
-        )
+        veryfy_analyzed_result(result, node.__workflow_id__, "analyze")
         result.fully_calibrated = True
     if plot:
         call_plot(node, result)

@@ -108,7 +124,8 @@ def check_data(workflow: WorkflowType, code_path: str | Path,

     if history is None:
         logger.debug(f'No history found for "{workflow.__workflow_id__}"')
-        result = Result(
+        result = Result(workflow=workflow.__workflow_id__,
+                        config_path=current_config(state_path))
         result.in_spec = False
         result.bad_data = False
         return result

@@ -130,8 +147,13 @@ def check_data(workflow: WorkflowType, code_path: str | Path,
         logger.debug(
             f'Checking "{workflow.__workflow_id__}" with "check" method ...')
         data = workflow.check()
-
-
+        if not is_pickleable(data):
+            raise TypeError(
+                f'"{workflow.__workflow_id__}" : "check" return not pickleable data'
+            )
+        result = Result(workflow=workflow.__workflow_id__,
+                        data=data,
+                        config_path=current_config(state_path))
         #save_result(workflow.__workflow_id__, result, state_path)

         logger.debug(f'Checked "{workflow.__workflow_id__}" !')

@@ -155,8 +177,13 @@ def check_data(workflow: WorkflowType, code_path: str | Path,
             f'Checking "{workflow.__workflow_id__}" with "calibrate" method ...'
         )
         data = workflow.calibrate()
-
-
+        if not is_pickleable(data):
+            raise TypeError(
+                f'"{workflow.__workflow_id__}" : "calibrate" return not pickleable data'
+            )
+        result = Result(workflow=workflow.__workflow_id__,
+                        data=data,
+                        config_path=current_config(state_path))
         save_result(workflow.__workflow_id__, result, state_path)

         logger.debug(f'Calibrated "{workflow}" !')

@@ -180,8 +207,13 @@ def calibrate(workflow: WorkflowType, code_path: str | Path,

     logger.debug(f'Calibrating "{workflow.__workflow_id__}" ...')
     data = workflow.calibrate()
-
-
+    if not is_pickleable(data):
+        raise TypeError(
+            f'"{workflow.__workflow_id__}" : "calibrate" return not pickleable data'
+        )
+    result = Result(workflow=workflow.__workflow_id__,
+                    data=data,
+                    config_path=current_config(state_path))
     save_result(workflow.__workflow_id__, result, state_path)
     logger.debug(f'Calibrated "{workflow.__workflow_id__}" !')
     result = call_analyzer(workflow, result, history, check=False, plot=plot)

@@ -243,7 +275,7 @@ def diagnose(workflow: WorkflowType, code_path: str | Path,
         raise CalibrationFailedError(
             f'"{workflow.__workflow_id__}": All dependents passed, but calibration failed!'
         )
-    update_parameters(result)
+    update_parameters(result, state_path)
     return True


@@ -299,7 +331,7 @@ def maintain(workflow: WorkflowType,
             f'"{workflow.__workflow_id__}": All dependents passed, but calibration failed!'
         )
     if update:
-        update_parameters(result)
+        update_parameters(result, state_path)
     return


@@ -321,5 +353,5 @@ def run(workflow: WorkflowType,
             f'"{workflow.__workflow_id__}": All dependents passed, but calibration failed!'
         )
     if update:
-        update_parameters(result)
+        update_parameters(result, state_path)
     return
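check, calibrate, and the analyzers must now return data that survives pickle.dumps; anything that cannot be pickled raises TypeError before it is saved. A quick illustration of the gate, equivalent to the is_pickleable helper added in schedule.py:

import pickle

def is_pickleable(obj) -> bool:
    # Same idea as the helper in schedule.py: try to serialize, report success.
    try:
        pickle.dumps(obj)
        return True
    except Exception:
        return False

print(is_pickleable({'freq': [4.5e9, 4.6e9], 'iq': (0.1, -0.2)}))  # True
print(is_pickleable(lambda x: x))           # False: lambdas cannot be pickled
print(is_pickleable(i for i in range(3)))   # False: generators cannot be pickled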
{qulab-2.5.4 → qulab-2.6.0}/qulab/executor/storage.py

@@ -3,6 +3,7 @@ import pickle
 import uuid
 from dataclasses import dataclass, field
 from datetime import datetime, timedelta
+from functools import lru_cache
 from pathlib import Path
 from typing import Any, Literal

@@ -13,6 +14,7 @@ from ..cli.config import get_config_value

 @dataclass
 class Result():
+    workflow: str = ''
     in_spec: bool = False
     bad_data: bool = False
     fully_calibrated: bool = False

@@ -25,6 +27,7 @@ class Result():
     index: int = -1
     previous_path: Path | None = None
     base_path: Path | None = None
+    config_path: Path | None = None

     @property
     def previous(self):

@@ -45,6 +48,14 @@ class Result():
             state = 'Outdated'
         return state

+    @property
+    def config(self):
+        from . import transform
+        if self.config_path is not None and self.base_path is not None:
+            return transform._load_config(self.config_path, self.base_path)
+        else:
+            return None
+
     @state.setter
     def state(self, state: Literal['OK', 'Bad', 'Outdated', 'In spec',
                                    'Out of spec', 'Bad data']):
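Result now records which workflow produced it and, via config_path, which configuration snapshot was active; the new config property loads that snapshot lazily. A small sketch (the workflow name is an example):

from qulab.executor.storage import Result

r = Result(workflow='rabi')   # id of the workflow that produced the result
print(r.config)               # None until config_path and base_path are both set;
                              # otherwise the snapshot is loaded via transform._load_config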
{qulab-2.5.4 → qulab-2.6.0}/qulab/executor/storage.py (continued)

@@ -181,36 +192,42 @@ def get_heads(base_path: str | Path) -> Path | None:
 def create_index(name: str,
                  base_path: str | Path,
                  context: str,
-                 width: int,
+                 width: int = -1,
                  start: int = 0):
-
-
-
-
+
+    path = Path(base_path) / "index" / name
+    if width == -1:
+        width = len(context)
     else:
-
-        path.parent.mkdir(parents=True, exist_ok=True)
-        with open(path, "w") as f:
-            f.write(str(index + 1))
+        width = max(width, len(context))

-    path
-
-
+    if path.with_suffix('.width').exists():
+        width = int(path.with_suffix('.width').read_text())
+        index = int(path.with_suffix('.seq').read_text())
+    else:
+        index = start
+    if width < len(context):
+        raise ValueError(
+            f"Context '{context}' is too long, existing width of '{name}' is {width}."
+        )
+    if not path.with_suffix('.width').exists():
+        path.parent.mkdir(parents=True, exist_ok=True)
+        path.with_suffix('.width').write_text(str(width))

-    path
-    with open(path, "a") as f:
+    path.with_suffix('.seq').write_text(str(index + 1))

+    with path.with_suffix('.idx').open("a") as f:
         f.write(f"{context.ljust(width)}\n")

     return index


+@lru_cache(maxsize=4096)
 def query_index(name: str, base_path: str | Path, index: int):
-    path = Path(base_path) / "index" /
-
-
-    path
-    with open(path, "r") as f:
+    path = Path(base_path) / "index" / name
+    width = int(path.with_suffix('.width').read_text())
+
+    with path.with_suffix('.idx').open("r") as f:
         f.seek(index * (width + 1))
         context = f.read(width)
     return context.rstrip()

@@ -219,5 +236,8 @@ def query_index(name: str, base_path: str | Path, index: int):
 def get_result_by_index(
         index: int, base_path: str | Path = get_config_value("data", Path)
 ) -> Result | None:
-
-
+    try:
+        path = query_index("result", base_path, index)
+        return load_result(path, base_path)
+    except:
+        return None
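create_index now keeps fixed-width records in three sidecar files under <base_path>/index/ (<name>.width, <name>.seq and <name>.idx), which lets the cached query_index seek straight to byte index * (width + 1). A rough usage sketch with an example index name, base path and record:

from pathlib import Path
from qulab.executor.storage import create_index, query_index

base = Path('data')                                # example data directory
i = create_index('demo', base, context='some/result/path', width=64)
print(query_index('demo', base, i))                # -> 'some/result/path'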
qulab-2.6.0/qulab/executor/transform.py (new file)

@@ -0,0 +1,122 @@
+import functools
+import hashlib
+import lzma
+import pickle
+from pathlib import Path
+
+from .storage import Result
+
+__config_id = None
+
+
+def _query_config(name: str, default=None):
+    import pickle
+
+    try:
+        with open('parameters.pkl', 'rb') as f:
+            parameters = pickle.load(f)
+    except:
+        parameters = {}
+
+    return parameters.get(name, default)
+
+
+def _update_config(updates):
+    import pickle
+
+    try:
+        with open('parameters.pkl', 'rb') as f:
+            parameters = pickle.load(f)
+    except:
+        parameters = {}
+
+    for k, v in updates.items():
+        parameters[k] = v
+
+    with open('parameters.pkl', 'wb') as f:
+        pickle.dump(parameters, f)
+
+
+def _export_config() -> dict:
+    import pickle
+
+    try:
+        with open('parameters.pkl', 'rb') as f:
+            parameters = pickle.load(f)
+    except:
+        parameters = {}
+
+    return parameters
+
+
+def update_parameters(result: Result, data_path):
+    global __config_id
+    update_config(result.parameters)
+    cfg = export_config()
+    __config_id = _save_config(cfg, data_path)
+
+
+def current_config(data_path):
+    global __config_id
+    if __config_id is None:
+        cfg = export_config()
+        __config_id = _save_config(cfg, data_path)
+    return __config_id
+
+
+def _save_config(cfg, data_path):
+    global __config_id
+    i = 0
+    buf = pickle.dumps(cfg)
+    buf = lzma.compress(buf)
+    h = hashlib.md5(buf)
+
+    while True:
+        salt = f"{i:08d}".encode()
+        h.update(salt)
+        hashstr = h.hexdigest()
+        cfg_id = Path(hashstr[:2]) / hashstr[2:4] / hashstr[4:]
+        path = Path(data_path) / 'config' / cfg_id
+        if not path.exists():
+            path.parent.mkdir(parents=True, exist_ok=True)
+            with open(path, 'wb') as f:
+                f.write(buf)
+            break
+        elif path.read_bytes() == buf:
+            break
+        i += 1
+    __config_id = str(cfg_id)
+    return __config_id
+
+
+@functools.lru_cache(maxsize=1024)
+def _load_config(id, data_path):
+    path = Path(data_path) / 'config' / id
+    with open(path, 'rb') as f:
+        buf = f.read()
+    cfg = pickle.loads(lzma.decompress(buf))
+    return cfg
+
+
+query_config = _query_config
+update_config = _update_config
+export_config = _export_config
+
+
+def set_config_api(query_method, update_method, export_method):
+    """
+    Set the query and update methods for the config.
+
+    Args:
+        query_method: The query method.
+            the method should take a key and return the value.
+        update_method: The update method.
+            the method should take a dict of updates.
+    """
+    global query_config, update_config, export_config
+
+    query_config = query_method
+    update_config = update_method
+    export_config = export_method
+
+    return query_config, update_config, export_config
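The new transform module stores each exported configuration as a content-addressed file: the snapshot is pickled, lzma-compressed, and written under <data_path>/config/ at a path derived from an MD5 hash (salted and retried if a different snapshot ever collides), so identical snapshots are stored only once. A hedged round-trip sketch ('data' and the parameter are examples):

from qulab.executor import transform

cfg_id = transform._save_config({'Q1.frequency': 4.5e9}, 'data')
print(cfg_id)                                  # path-like id, e.g. 'ab/cd/0123...'
assert transform._load_config(cfg_id, 'data') == {'Q1.frequency': 4.5e9}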
{qulab-2.5.4 → qulab-2.6.0}/qulab/executor/utils.py

@@ -1,5 +1,6 @@
 from pathlib import Path

+from ..cli.config import get_config_value
 from .load import load_workflow


@@ -133,3 +134,23 @@ def check_analyze(result: Result, history: Result | None = None) -> Result:

     return result
 """
+
+
+def debug_analyze(
+    result_index: int,
+    code_path: str | Path = get_config_value('code_path', Path),
+    data_path: str | Path = get_config_value('data_path', Path),
+) -> None:
+    from .storage import get_result_by_index
+
+    result = get_result_by_index(result_index, data_path)
+    if result is None:
+        raise ValueError(f'Invalid result index: {result_index}')
+    workflow = result.workflow
+    wf = load_workflow(workflow, code_path)
+    if wf is None:
+        raise ValueError(f'Invalid workflow: {workflow}')
+    result = wf.analyze(result)
+    if hasattr(wf, 'plot'):
+        wf.plot(result)
+    return result
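debug_analyze is a small helper for reworking analysis code against stored data: it loads a saved result by index, re-imports the workflow that produced it, re-runs analyze, and calls plot if the workflow defines one. Usage sketch (the index and paths are examples):

from qulab.executor.utils import debug_analyze

result = debug_analyze(42, code_path='code', data_path='data')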
qulab-2.6.0/qulab/version.py (new file)

@@ -0,0 +1 @@
+__version__ = "2.6.0"
qulab-2.5.4/qulab/executor/transform.py (deleted)

@@ -1,55 +0,0 @@
-from .storage import Result
-
-
-def _query_config(name: str, default=None):
-    import pickle
-
-    try:
-        with open('parameters.pkl', 'rb') as f:
-            parameters = pickle.load(f)
-    except:
-        parameters = {}
-
-    return parameters.get(name, default)
-
-
-def _update_config(updates):
-    import pickle
-
-    try:
-        with open('parameters.pkl', 'rb') as f:
-            parameters = pickle.load(f)
-    except:
-        parameters = {}
-
-    for k, v in updates.items():
-        parameters[k] = v
-
-    with open('parameters.pkl', 'wb') as f:
-        pickle.dump(parameters, f)
-
-
-def update_parameters(result: Result):
-    update_config(result.parameters)
-
-
-query_config = _query_config
-update_config = _update_config
-
-
-def set_config_api(query_method, update_method):
-    """
-    Set the query and update methods for the config.
-
-    Args:
-        query_method: The query method.
-            the method should take a key and return the value.
-        update_method: The update method.
-            the method should take a dict of updates.
-    """
-    global query_config, update_config
-
-    query_config = query_method
-    update_config = update_method
-
-    return query_config, update_config
qulab-2.5.4/qulab/version.py (deleted)

@@ -1 +0,0 @@
-__version__ = "2.5.4"
All other files listed above are unchanged between qulab-2.5.4 and qulab-2.6.0.