QuLab 2.1.2__cp310-cp310-win_amd64.whl → 2.1.4__cp310-cp310-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/METADATA +1 -1
- {QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/RECORD +16 -17
- qulab/__main__.py +0 -2
- qulab/fun.cp310-win_amd64.pyd +0 -0
- qulab/scan/curd.py +78 -1
- qulab/scan/models.py +21 -6
- qulab/scan/query.py +1 -1
- qulab/scan/record.py +44 -0
- qulab/scan/scan.py +131 -8
- qulab/scan/server.py +233 -17
- qulab/sys/rpc/zmq_socket.py +7 -1
- qulab/version.py +1 -1
- qulab/scan/recorder.py +0 -247
- {QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/LICENSE +0 -0
- {QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/WHEEL +0 -0
- {QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/entry_points.txt +0 -0
- {QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/top_level.txt +0 -0
{QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/RECORD
CHANGED

@@ -1,7 +1,7 @@
 qulab/__init__.py,sha256=vkFybY8YSsQilYdThPRD83-btPAR41sy_WCXiM-6mME,141
-qulab/__main__.py,sha256=
-qulab/fun.cp310-win_amd64.pyd,sha256=
-qulab/version.py,sha256=
+qulab/__main__.py,sha256=V7iokU7awstgjCeiF_hoOdFyrqJwC_4QetiLe7cWvOQ,454
+qulab/fun.cp310-win_amd64.pyd,sha256=lh3YOyjxJQ9tKHaFrwflujhYyxtjzBAShTvy8SitKxE,31744
+qulab/version.py,sha256=HVJYMyJeujBoXcNkL2sXW9oxWN275q3pivrlKw3Hm3s,21
 qulab/monitor/__init__.py,sha256=xEVDkJF8issrsDeLqQmDsvtRmrf-UiViFcGTWuzdlFU,43
 qulab/monitor/__main__.py,sha256=k2H1H5Zf9LLXTDLISJkbikLH-z0f1e5i5i6wXXYPOrE,105
 qulab/monitor/config.py,sha256=y_5StMkdrbZO1ziyKBrvIkB7Jclp9RCPK1QbsOhCxnY,785
@@ -13,15 +13,14 @@ qulab/monitor/ploter.py,sha256=dg7W28XTwEbBxHVtdPkFV135OQgoQwTi-NJCZQF-HYU,3724
 qulab/monitor/qt_compat.py,sha256=Eq7zlA4_XstB92NhtAqebtWU_Btw4lcwFO30YxZ-TPE,804
 qulab/monitor/toolbar.py,sha256=HxqG6ywKFyQJM2Q1s7SnhuzjbyeROczAZKwxztD1WJ8,8213
 qulab/scan/__init__.py,sha256=RR_0NQcr8Mi3vpWdypydbijQ1rXA0D3DEidQ7xjNslM,133
-qulab/scan/curd.py,sha256=
+qulab/scan/curd.py,sha256=yaTglGiS6mlk0GqDHi_w8T02XGBMvDZtXSdML7zDywk,7117
 qulab/scan/expression.py,sha256=vwUM9E0OFQal4bljlUtLR3NJu4zGRyuWYrdyZSs3QTU,16199
-qulab/scan/models.py,sha256=
+qulab/scan/models.py,sha256=ZvXkJEt5Yz3Sjx0JKzYka-q2Uo-w_iVzHgH8A6DbjF0,18236
 qulab/scan/optimize.py,sha256=MlT4y422CnP961IR384UKryyZh8riNvrPSd2z_MXLEg,2356
-qulab/scan/query.py,sha256=
-qulab/scan/record.py,sha256=
-qulab/scan/
-qulab/scan/
-qulab/scan/server.py,sha256=Q7lX9ms37WbnzuLhrv3YbNrusTsxQUNT9slFqon5VL0,2872
+qulab/scan/query.py,sha256=WZeTiEtycP4raqzIUEP6tNg-OMaQYfRCxcdyYP5Swyg,11819
+qulab/scan/record.py,sha256=K8h8psiT_xgHZem9HRwzEgmr9lE0dGYHjlfK11sHldw,19111
+qulab/scan/scan.py,sha256=hBl9BjKfH-PoofhtpaD-O6s37wxy88hBN59GUXAcAuo,34877
+qulab/scan/server.py,sha256=e7AqFmaN9924VYrbvyKwPrVBzqPcnXbT8YwexAN_ZjQ,11911
 qulab/scan/space.py,sha256=OPceWIIrb2KDDQaSxD3Vkzkf9NDSuqWuQoDnKiWqtAo,5381
 qulab/scan/utils.py,sha256=30qnYvyFyctwcWxOCUpCNxXgGysA7xdIDzYbjwxGUzA,3744
 qulab/storage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -71,7 +70,7 @@ qulab/sys/rpc/server.py,sha256=W3bPwe8um1IeR_3HLx-ad6iCcbeuUQcSg11Ze4w6DJg,742
 qulab/sys/rpc/socket.py,sha256=W3bPwe8um1IeR_3HLx-ad6iCcbeuUQcSg11Ze4w6DJg,742
 qulab/sys/rpc/utils.py,sha256=BurIcqh8CS-Hsk1dYP6IiefK4qHivaEqD9_rBY083SA,619
 qulab/sys/rpc/worker.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-qulab/sys/rpc/zmq_socket.py,sha256=
+qulab/sys/rpc/zmq_socket.py,sha256=SHMCp5oawYglfawuvDDTJJrQtxFpErlBtvoEwd61OO0,8362
 qulab/visualization/__init__.py,sha256=Bkt9AK5c45d6HFLlT-f8cIppywXziHtJqhDtVxOoKKo,6317
 qulab/visualization/__main__.py,sha256=WduINFl21B-XMsi2rg2cVhIyU11hKKX3zOsjc56QLiQ,1710
 qulab/visualization/_autoplot.py,sha256=JFLR3e71bryPaUkOCWFzD9C4Nk-ulqIdBb1YN4QnZic,14562
@@ -79,9 +78,9 @@ qulab/visualization/plot_layout.py,sha256=yAnMONOms7_szCdng-8wPpUMPis5UnbaNNzV4K
 qulab/visualization/plot_seq.py,sha256=h9D0Yl_yO64IwlvBgzMu9EBKr9gg6y8QE55gu2PfTns,2783
 qulab/visualization/qdat.py,sha256=HubXFu4nfcA7iUzghJGle1C86G6221hicLR0b-GqhKQ,5887
 qulab/visualization/widgets.py,sha256=HcYwdhDtLreJiYaZuN3LfofjJmZcLwjMfP5aasebgDo,3266
-QuLab-2.1.
-QuLab-2.1.
-QuLab-2.1.
-QuLab-2.1.
-QuLab-2.1.
-QuLab-2.1.
+QuLab-2.1.4.dist-info/LICENSE,sha256=b4NRQ-GFVpJMT7RuExW3NwhfbrYsX7AcdB7Gudok-fs,1086
+QuLab-2.1.4.dist-info/METADATA,sha256=viS-wETLJCqd-ugZW2Ac_Z_pl1_BGi8KbBXhrlxj2qs,3609
+QuLab-2.1.4.dist-info/WHEEL,sha256=lO6CqtLHCAi38X3Es1a4R1lAjZFvN010IMRCFo2S7Mc,102
+QuLab-2.1.4.dist-info/entry_points.txt,sha256=ohBzutEnQimP_BZWiuXdSliu4QAYSHHcN0PZD8c7ZCY,46
+QuLab-2.1.4.dist-info/top_level.txt,sha256=3T886LbAsbvjonu_TDdmgxKYUn939BVTRPxPl9r4cEg,6
+QuLab-2.1.4.dist-info/RECORD,,
qulab/__main__.py
CHANGED

@@ -1,7 +1,6 @@
 import click
 
 from .monitor.__main__ import main as monitor
-from .scan.recorder import record
 from .scan.server import server
 from .sys.net.cli import dht
 from .visualization.__main__ import plot
@@ -21,7 +20,6 @@ def hello():
 main.add_command(monitor)
 main.add_command(plot)
 main.add_command(dht)
-main.add_command(record)
 main.add_command(server)
 
 if __name__ == '__main__':
qulab/fun.cp310-win_amd64.pyd
CHANGED

Binary file
qulab/scan/curd.py
CHANGED

@@ -1,4 +1,7 @@
+import lzma
+import pickle
 from datetime import date, datetime, timezone
+from pathlib import Path
 from typing import Sequence, Type, Union
 
 from sqlalchemy.orm import Query, Session, aliased
@@ -6,7 +9,8 @@ from sqlalchemy.orm.exc import NoResultFound
 from sqlalchemy.orm.session import Session
 from waveforms.dicttree import foldDict
 
-from .models import Comment,
+from .models import (Cell, Comment, Config, InputText, Notebook, Record,
+                     Report, Sample, Tag, utcnow)
 
 
 def tag(session: Session, tag_text: str) -> Tag:
@@ -142,3 +146,76 @@ def remove_tags(session: Session, record_id: int, tags: Sequence[str]):
         session.rollback()
         return False
     return True
+
+
+def create_notebook(session: Session, notebook_name: str) -> Notebook:
+    """Create a notebook in the database."""
+    notebook = Notebook(name=notebook_name)
+    session.add(notebook)
+    return notebook
+
+
+def create_input_text(session: Session, input_text: str) -> InputText:
+    """Create an input text in the database."""
+    input = InputText()
+    input.text = input_text
+    try:
+        input = session.query(InputText).filter(
+            InputText.hash == input.hash,
+            InputText.text_field == input_text).one()
+    except NoResultFound:
+        session.add(input)
+    return input
+
+
+def create_cell(session: Session, notebook: Notebook, input_text: str) -> Cell:
+    """Create a cell in the database."""
+    cell = Cell()
+    cell.notebook = notebook
+    cell.input = create_input_text(session, input_text)
+    cell.index = len(notebook.cells) - 1
+    session.add(cell)
+    notebook.atime = cell.ctime
+    return cell
+
+
+def create_config(session: Session, config: dict | bytes, base: Path,
+                  filename: str) -> Config:
+    """Create a config in the database."""
+
+    if not isinstance(config, bytes):
+        buf = pickle.dumps(config)
+        buf = lzma.compress(buf)
+        content_type = 'application/pickle+lzma'
+    else:
+        buf = config
+        content_type = 'application/octet-stream'
+    config = Config(buf)
+    config.content_type = content_type
+    for cfg in session.query(Config).filter(Config.hash == config.hash).all():
+        with open(base / cfg.file, 'rb') as f:
+            if f.read() == buf:
+                cfg.atime = utcnow()
+                return cfg
+    else:
+        path = base / filename
+        path.parent.mkdir(parents=True, exist_ok=True)
+        with open(path, 'wb') as f:
+            f.write(buf)
+        config.file = filename
+        session.add(config)
+        return config
+
+
+def get_config(session: Session, config_id: int, base: Path):
+    config = session.get(Config, config_id)
+    if config is None:
+        return None
+    config.atime = utcnow()
+    path = base / config.file
+    with open(path, 'rb') as f:
+        buf = f.read()
+    if config.content_type == 'application/pickle+lzma':
+        buf = lzma.decompress(buf)
+        buf = pickle.loads(buf)
+    return buf
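
Note: the new helpers above persist scan configs as lzma-compressed pickles on disk and use a SHA-1 digest for lookup and deduplication. Below is a minimal standalone sketch of that storage scheme; it leaves out the Config ORM model and session handling, and the directory name and sample config are invented for illustration (the real code stores files under a random path and keeps the digest in the database).

# Standalone sketch of the config storage scheme behind create_config/get_config:
# pickle + lzma on disk, SHA-1 digest for lookup. No QuLab database involved.
import hashlib
import lzma
import pickle
from pathlib import Path

base = Path('objects')                 # stand-in for the server's datapath / 'objects'
base.mkdir(parents=True, exist_ok=True)

config = {'qubits': {'Q1': {'frequency': 5.3e9}}}   # illustrative payload

buf = lzma.compress(pickle.dumps(config))           # 'application/pickle+lzma'
digest = hashlib.sha1(buf).hexdigest()              # what Config.hash indexes
path = base / digest                                # here the digest doubles as the filename

if not path.exists():                               # dedup: identical bytes share one file
    path.write_bytes(buf)

restored = pickle.loads(lzma.decompress(path.read_bytes()))
assert restored == config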
qulab/scan/models.py
CHANGED

@@ -1,11 +1,9 @@
 import hashlib
 import pickle
-import time
 from datetime import datetime, timezone
 from functools import singledispatchmethod
-from typing import Optional
 
-from sqlalchemy import (
+from sqlalchemy import (Column, DateTime, Float, ForeignKey, Integer,
                         LargeBinary, String, Table, Text, create_engine)
 from sqlalchemy.orm import (backref, declarative_base, relationship,
                             sessionmaker)
@@ -325,7 +323,7 @@ class InputText(Base):
     __tablename__ = 'inputs'
 
     id = Column(Integer, primary_key=True)
-    hash = Column(LargeBinary(20))
+    hash = Column(LargeBinary(20), index=True)
     text_field = Column(Text, unique=True)
 
     @property
@@ -432,6 +430,22 @@ class SampleTransfer(Base):
     comments = relationship("Comment", secondary=sample_transfer_comments)
 
 
+class Config(Base):
+    __tablename__ = 'configs'
+
+    id = Column(Integer, primary_key=True)
+    hash = Column(LargeBinary(20), index=True)
+    file = Column(String)
+    content_type = Column(String, default='application/pickle')
+    ctime = Column(DateTime, default=utcnow)
+    atime = Column(DateTime, default=utcnow)
+
+    records = relationship("Record", back_populates="config")
+
+    def __init__(self, data: bytes) -> None:
+        self.hash = hashlib.sha1(data).digest()
+
+
 class Record(Base):
     __tablename__ = 'records'
 
@@ -440,14 +454,14 @@ class Record(Base):
     mtime = Column(DateTime, default=utcnow)
     atime = Column(DateTime, default=utcnow)
     user_id = Column(Integer, ForeignKey('users.id'))
+    config_id = Column(Integer, ForeignKey('configs.id'))
     parent_id = Column(Integer, ForeignKey('records.id'))
     cell_id = Column(Integer, ForeignKey('cells.id'))
 
     app = Column(String)
     file = Column(String)
+    content_type = Column(String, default='application/pickle')
     key = Column(String)
-    config = Column(JSON)
-    task_hash = Column(LargeBinary(32))
 
     parent = relationship("Record",
                           remote_side=[id],
@@ -456,6 +470,7 @@ class Record(Base):
                             remote_side=[parent_id],
                             back_populates="parent")
 
+    config = relationship("Config", back_populates="records")
     user = relationship("User")
     samples = relationship("Sample",
                            secondary=sample_records,
qulab/scan/query.py
CHANGED
qulab/scan/record.py
CHANGED

@@ -95,6 +95,11 @@ class BufferList():
             dill.dump(item, f)
         self._list.clear()
 
+    def delete(self):
+        if isinstance(self.file, Path):
+            self.file.unlink()
+            self.file = None
+
     def append(self, pos, value, dims=None):
         if dims is not None:
             if any([p != 0 for i, p in enumerate(pos) if i not in dims]):
@@ -450,6 +455,20 @@ class Record():
         with open(self._file, 'wb') as f:
             dill.dump(self, f)
 
+    def delete(self):
+        if self.is_remote_record():
+            with ZMQContextManager(zmq.DEALER,
+                                   connect=self.database) as socket:
+                socket.send_pyobj({
+                    'method': 'record_delete',
+                    'record_id': self.id
+                })
+        elif self.is_local_record():
+            for key, value in self._items.items():
+                if isinstance(value, BufferList):
+                    value.delete()
+            self._file.unlink()
+
     def export(self, file):
         with zipfile.ZipFile(file,
                              'w',
@@ -470,8 +489,33 @@ class Record():
                 else:
                     items[key] = value
             with z.open('record.pkl', 'w') as f:
+                self.description['entry']['scripts'] = self.scripts()
                 dill.dump((self.description, items), f)
 
+    def scripts(self, session=None):
+        scripts = self.description['entry']['scripts']
+        if isinstance(scripts, list):
+            return scripts
+        else:
+            cell_id = scripts
+
+        if self.is_remote_record():
+            with ZMQContextManager(zmq.DEALER,
+                                   connect=self.database) as socket:
+                socket.send_pyobj({
+                    'method': 'notebook_history',
+                    'cell_id': cell_id
+                })
+                return socket.recv_pyobj()
+        elif self.is_local_record():
+            from .models import Cell
+            assert session is not None, "session is required for local record"
+            cell = session.get(Cell, cell_id)
+            return [
+                cell.input.text
+                for cell in cell.notebook.cells[1:cell.index + 2]
+            ]
+
     @classmethod
     def load(cls, file: str):
         with zipfile.ZipFile(file, 'r') as z:
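
Note: for remote records, both new methods talk to the record server by sending a plain pickled dict over a ZMQ DEALER socket. Below is a sketch of the notebook_history request that backs Record.scripts(), written with raw pyzmq instead of qulab's ZMQContextManager; the address and cell id are placeholders and assume a record server from this release is running.

# Sketch of the request/reply pair behind Record.scripts() for a remote record:
# ask the server for the notebook history up to the cell that produced the record.
import zmq

ctx = zmq.Context.instance()
sock = ctx.socket(zmq.DEALER)
sock.connect('tcp://127.0.0.1:6789')     # default_record_port in this release

sock.send_pyobj({'method': 'notebook_history', 'cell_id': 7})   # placeholder cell id
scripts = sock.recv_pyobj()              # list of input-cell texts, or None if unknown
sock.close()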
qulab/scan/scan.py
CHANGED

@@ -2,8 +2,12 @@ import asyncio
 import copy
 import inspect
 import itertools
+import lzma
 import os
+import pickle
+import platform
 import re
+import subprocess
 import sys
 import uuid
 from concurrent.futures import ProcessPoolExecutor
@@ -19,7 +23,7 @@ from ..sys.rpc.zmq_socket import ZMQContextManager
 from .expression import Env, Expression, Symbol
 from .optimize import NgOptimizer
 from .record import Record
-from .
+from .server import default_record_port
 from .space import Optimizer, OptimizeSpace, Space
 from .utils import async_zip, call_function, dump_globals
@@ -41,6 +45,7 @@ except:
 
 __process_uuid = uuid.uuid1()
 __task_counter = itertools.count()
+__notebook_id = None
 
 if os.getenv('QULAB_SERVER'):
     default_server = os.getenv('QULAB_SERVER')
@@ -52,6 +57,105 @@ else:
     default_executor = default_server
 
 
+def yapf_reformat(cell_text):
+    try:
+        import isort
+        import yapf.yapflib.yapf_api
+
+        fname = f"f{uuid.uuid1().hex}"
+
+        def wrap(source):
+            lines = [f"async def {fname}():"]
+            for line in source.split('\n'):
+                lines.append("    " + line)
+            return '\n'.join(lines)
+
+        def unwrap(source):
+            lines = []
+            for line in source.split('\n'):
+                if line.startswith(f"async def {fname}():"):
+                    continue
+                lines.append(line[4:])
+            return '\n'.join(lines)
+
+        cell_text = re.sub('^%', '#%#', cell_text, flags=re.M)
+        reformated_text = unwrap(
+            yapf.yapflib.yapf_api.FormatCode(wrap(isort.code(cell_text)))[0])
+        return re.sub('^#%#', '%', reformated_text, flags=re.M)
+    except:
+        return cell_text
+
+
+def get_installed_packages():
+    result = subprocess.run([sys.executable, '-m', 'pip', 'freeze'],
+                            stdout=subprocess.PIPE,
+                            text=True)
+
+    lines = result.stdout.split('\n')
+    packages = []
+    for line in lines:
+        if line:
+            packages.append(line)
+    return packages
+
+
+def get_system_info():
+    info = {
+        'OS': platform.uname()._asdict(),
+        'Python': sys.version,
+        'PythonExecutable': sys.executable,
+        'PythonPath': sys.path,
+        'packages': get_installed_packages()
+    }
+    return info
+
+
+def current_notebook():
+    return __notebook_id
+
+
+async def create_notebook(name: str, database=default_server, socket=None):
+    global __notebook_id
+
+    async with ZMQContextManager(zmq.DEALER, connect=database,
+                                 socket=socket) as socket:
+        await socket.send_pyobj({'method': 'notebook_create', 'name': name})
+        __notebook_id = await socket.recv_pyobj()
+
+
+async def save_input_cells(notebook_id,
+                           input_cells,
+                           database=default_server,
+                           socket=None):
+    async with ZMQContextManager(zmq.DEALER, connect=database,
+                                 socket=socket) as socket:
+        await socket.send_pyobj({
+            'method': 'notebook_extend',
+            'notebook_id': notebook_id,
+            'input_cells': input_cells
+        })
+        return await socket.recv_pyobj()
+
+
+async def create_config(config: dict, database=default_server, socket=None):
+    async with ZMQContextManager(zmq.DEALER, connect=database,
+                                 socket=socket) as socket:
+        buf = lzma.compress(pickle.dumps(config))
+        await socket.send_pyobj({'method': 'config_update', 'update': buf})
+        return await socket.recv_pyobj()
+
+
+async def get_config(config_id: int, database=default_server, socket=None):
+    async with ZMQContextManager(zmq.DEALER, connect=database,
+                                 socket=socket) as socket:
+        await socket.send_pyobj({
+            'method': 'config_get',
+            'config_id': config_id
+        })
+        buf = await socket.recv_pyobj()
+        return pickle.loads(lzma.decompress(buf))
+
+
 def task_uuid():
     return uuid.uuid3(__process_uuid, str(next(__task_counter)))
 
@@ -135,10 +239,11 @@ class Scan():
                  mixin=None):
         self.id = task_uuid()
         self.record = None
-        self.
+        self.config = None
         self.description = {
             'app': app,
             'tags': tags,
+            'config': None,
             'loops': {},
             'intrinsic_loops': {},
             'consts': {},
@@ -157,9 +262,11 @@ class Scan():
             'database': database,
             'hiden': ['self', r'^__.*', r'.*__$'],
             'entry': {
+                'system': get_system_info(),
                 'env': {},
                 'shell': '',
-                'cmds': []
+                'cmds': [],
+                'scripts': []
             },
         }
         self._current_level = 0
@@ -248,8 +355,7 @@ class Scan():
 
     def emit(self, current_level, step, position, variables: dict[str, Any]):
         self._msg_queue.put_nowait(
-
-            self._emit(current_level, step, position, variables.copy())))
+            self._emit(current_level, step, position, variables.copy()))
 
     def hide(self, name: str):
         self.description['hiden'].append(name)
@@ -287,8 +393,6 @@ class Scan():
     def get(self, name: str):
         if name in self.description['consts']:
             return self.description['consts'][name]
-        elif name in self.namespace:
-            return self.namespace.get(name)
         else:
             return Symbol(name)
 
@@ -442,6 +546,17 @@ class Scan():
 
     async def run(self):
         assymbly(self.description)
+        if self.config:
+            self.description['config'] = await create_config(
+                self.config, self.description['database'], self._sock)
+        if current_notebook() is None:
+            await create_notebook('untitle', self.description['database'],
+                                  self._sock)
+        cell_id = await save_input_cells(current_notebook(),
+                                         self.description['entry']['scripts'],
+                                         self.description['database'],
+                                         self._sock)
+        self.description['entry']['scripts'] = cell_id
         if isinstance(
                 self.description['database'],
                 str) and self.description['database'].startswith("tcp://"):
@@ -635,13 +750,21 @@ def assymbly(description):
     ipy = get_ipython()
     if ipy is not None:
         description['entry']['shell'] = 'ipython'
-        description['entry']['
+        description['entry']['scripts'] = [
+            yapf_reformat(cell_text) for cell_text in ipy.user_ns['In']
+        ]
     else:
         try:
             description['entry']['shell'] = 'shell'
            description['entry']['cmds'] = [
                 sys.executable, __main__.__file__, *sys.argv[1:]
             ]
+            description['entry']['scripts'] = []
+            try:
+                with open(__main__.__file__) as f:
+                    description['entry']['scripts'].append(f.read())
+            except:
+                pass
         except:
             pass
 
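
Note: every Scan now snapshots the runtime environment via get_system_info() into description['entry']['system'], alongside the yapf/isort-reformatted input cells. A small usage sketch, assuming qulab 2.1.4 is importable in the current environment:

# Inspect the environment snapshot that Scan() stores with each record.
from qulab.scan.scan import get_system_info

info = get_system_info()
print(info['OS']['system'], info['OS']['release'])   # from platform.uname()._asdict()
print(info['Python'])                                # interpreter version string
print(len(info['packages']), 'entries from pip freeze')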
qulab/scan/server.py
CHANGED

@@ -1,5 +1,8 @@
 import asyncio
+import os
 import pickle
+import time
+from pathlib import Path
 
 import click
 import dill
@@ -8,7 +11,26 @@ from loguru import logger
 
 from qulab.sys.rpc.zmq_socket import ZMQContextManager
 
-from .
+from .curd import (create_cell, create_config, create_notebook, get_config,
+                   query_record, remove_tags, tag, update_tags)
+from .models import Cell, Notebook
+from .models import Record as RecordInDB
+from .models import Session, create_engine, create_tables, sessionmaker, utcnow
+from .record import BufferList, Record, random_path
+
+try:
+    default_record_port = int(os.getenv('QULAB_RECORD_PORT', 6789))
+except:
+    default_record_port = 6789
+
+if os.getenv('QULAB_RECORD_PATH'):
+    datapath = Path(os.getenv('QULAB_RECORD_PATH'))
+else:
+    datapath = Path.home() / 'qulab' / 'data'
+datapath.mkdir(parents=True, exist_ok=True)
+
+record_cache = {}
+CACHE_SIZE = 1024
 
 pool = {}
 
@@ -27,15 +49,177 @@ async def reply(req, resp):
     await req.sock.send_multipart([req.identity, pickle.dumps(resp)])
 
 
+def clear_cache():
+    if len(record_cache) < CACHE_SIZE:
+        return
+
+    for k, (t, _) in zip(sorted(record_cache.items(), key=lambda x: x[1][0]),
+                         range(len(record_cache) - CACHE_SIZE)):
+        del record_cache[k]
+
+
+def flush_cache():
+    for k, (t, r) in record_cache.items():
+        r.flush()
+
+
+def get_local_record(session: Session, id: int, datapath: Path) -> Record:
+    record_in_db = session.get(RecordInDB, id)
+    record_in_db.atime = utcnow()
+
+    if record_in_db.file.endswith('.zip'):
+        return Record.load(datapath / 'objects' / record_in_db.file)
+
+    path = datapath / 'objects' / record_in_db.file
+    with open(path, 'rb') as f:
+        record = dill.load(f)
+    record.database = datapath
+    record._file = path
+    return record
+
+
+def get_record(session: Session, id: int, datapath: Path) -> Record:
+    if id not in record_cache:
+        record = get_local_record(session, id, datapath)
+    else:
+        record = record_cache[id][1]
+    clear_cache()
+    record_cache[id] = time.time(), record
+    return record
+
+
+def record_create(session: Session, description: dict, datapath: Path) -> int:
+    record = Record(None, datapath, description)
+    record_in_db = RecordInDB()
+    if 'app' in description:
+        record_in_db.app = description['app']
+    if 'tags' in description:
+        record_in_db.tags = [tag(session, t) for t in description['tags']]
+    record_in_db.file = '/'.join(record._file.parts[-4:])
+    record_in_db.config_id = description['config']
+    record._file = datapath / 'objects' / record_in_db.file
+    session.add(record_in_db)
+    try:
+        session.commit()
+        record.id = record_in_db.id
+        clear_cache()
+        record_cache[record.id] = time.time(), record
+        return record.id
+    except:
+        session.rollback()
+        raise
+
+
+def record_append(session: Session, record_id: int, level: int, step: int,
+                  position: int, variables: dict, datapath: Path):
+    record = get_record(session, record_id, datapath)
+    record.append(level, step, position, variables)
+    try:
+        record_in_db = session.get(RecordInDB, record_id)
+        record_in_db.mtime = utcnow()
+        record_in_db.atime = utcnow()
+        session.commit()
+    except:
+        session.rollback()
+        raise
+
+
+def record_delete(session: Session, record_id: int, datapath: Path):
+    record = get_local_record(session, record_id, datapath)
+    record.delete()
+    record_in_db = session.get(RecordInDB, record_id)
+    session.delete(record_in_db)
+    session.commit()
+
+
 @logger.catch
-async def handle(request: Request):
+async def handle(session: Session, request: Request, datapath: Path):
 
     msg = request.msg
 
     match request.method:
         case 'ping':
             await reply(request, 'pong')
+        case 'bufferlist_slice':
+            record = get_record(session, msg['record_id'], datapath)
+            bufferlist = record.get(msg['key'],
+                                    buffer_to_array=False,
+                                    slice=msg['slice'])
+            await reply(request, list(bufferlist.iter()))
+        case 'record_create':
+            description = dill.loads(msg['description'])
+            await reply(request, record_create(session, description, datapath))
+        case 'record_append':
+            record_append(session, msg['record_id'], msg['level'], msg['step'],
+                          msg['position'], msg['variables'], datapath)
+        case 'record_description':
+            record = get_record(session, msg['record_id'], datapath)
+            await reply(request, dill.dumps(record))
+        case 'record_getitem':
+            record = get_record(session, msg['record_id'], datapath)
+            await reply(request, record.get(msg['key'], buffer_to_array=False))
+        case 'record_keys':
+            record = get_record(session, msg['record_id'], datapath)
+            await reply(request, record.keys())
+        case 'record_query':
+            total, apps, table = query_record(session,
+                                              offset=msg.get('offset', 0),
+                                              limit=msg.get('limit', 10),
+                                              app=msg.get('app', None),
+                                              tags=msg.get('tags', ()),
+                                              before=msg.get('before', None),
+                                              after=msg.get('after', None))
+            await reply(request, (total, apps, table))
+        case 'record_get_tags':
+            record_in_db = session.get(RecordInDB, msg['record_id'])
+            await reply(request, [t.name for t in record_in_db.tags])
+        case 'record_remove_tags':
+            remove_tags(session, msg['record_id'], msg['tags'])
+        case 'record_add_tags':
+            update_tags(session, msg['record_id'], msg['tags'], True)
+        case 'record_replace_tags':
+            update_tags(session, msg['record_id'], msg['tags'], False)
+        case 'notebook_create':
+            notebook = create_notebook(session, msg['name'])
+            session.commit()
+            await reply(request, notebook.id)
+        case 'notebook_extend':
+            notebook = session.get(Notebook, msg['notebook_id'])
+            inputCells = msg.get('input_cells', [""])
+            aready_saved = len(notebook.cells)
+            if len(inputCells) > aready_saved:
+                for cell in inputCells[aready_saved:]:
+                    cell = create_cell(session, notebook, cell)
+                session.commit()
+                await reply(request, cell.id)
+            else:
+                await reply(request, None)
+        case 'notebook_history':
+            cell = session.get(Cell, msg['cell_id'])
+            if cell:
+                await reply(request, [
+                    cell.input.text
+                    for cell in cell.notebook.cells[1:cell.index + 2]
+                ])
+            else:
+                await reply(request, None)
+        case 'config_get':
+            config = get_config(session,
+                                msg['config_id'],
+                                base=datapath / 'objects')
+            session.commit()
+            await reply(request, config)
+        case 'config_update':
+            config = create_config(session,
+                                   msg['update'],
+                                   base=datapath / 'objects',
+                                   filename='/'.join(
+                                       random_path(datapath /
+                                                   'objects').parts[-4:]))
+            session.commit()
+            await reply(request, config.id)
         case 'submit':
+            from .scan import Scan
             description = dill.loads(msg['description'])
             task = Scan()
             task.description = description
@@ -55,24 +239,42 @@ async def handle(request: Request):
             logger.error(f"Unknown method: {msg['method']}")
 
 
-async def _handle(request: Request):
+async def _handle(session: Session, request: Request, datapath: Path):
     try:
-        await handle(request)
+        await handle(session, request, datapath)
     except:
         await reply(request, 'error')
 
 
-async def serv(port
+async def serv(port,
+               datapath,
+               url=None,
+               buffer_size=1024 * 1024 * 1024,
+               interval=60):
     logger.info('Server starting.')
     async with ZMQContextManager(zmq.ROUTER, bind=f"tcp://*:{port}") as sock:
-
-
-
-
-
+        if url is None:
+            url = 'sqlite:///' + str(datapath / 'data.db')
+        engine = create_engine(url)
+        create_tables(engine)
+        Session = sessionmaker(engine)
+        with Session() as session:
+            logger.info('Server started.')
+            received = 0
+            last_flush_time = time.time()
+            while True:
+                identity, msg = await sock.recv_multipart()
+                received += len(msg)
+                req = Request(sock, identity, msg)
+                asyncio.create_task(_handle(session, req, datapath))
+                if received > buffer_size or time.time(
+                ) - last_flush_time > interval:
+                    flush_cache()
+                    received = 0
+                    last_flush_time = time.time()
 
 
-async def watch(port, timeout=1):
+async def watch(port, datapath, url=None, timeout=1, buffer=1024, interval=60):
     with ZMQContextManager(zmq.DEALER,
                            connect=f"tcp://127.0.0.1:{port}") as sock:
         sock.setsockopt(zmq.LINGER, 0)
@@ -84,20 +286,34 @@ async def watch(port, timeout=1):
             else:
                 raise asyncio.TimeoutError()
         except (zmq.error.ZMQError, asyncio.TimeoutError):
-            return asyncio.create_task(
+            return asyncio.create_task(
+                serv(port, datapath, url, buffer * 1024 * 1024, interval))
         await asyncio.sleep(timeout)
 
 
-async def main(port, timeout=1):
-    task = await watch(port=port,
+async def main(port, datapath, url, timeout=1, buffer=1024, interval=60):
+    task = await watch(port=port,
+                       datapath=datapath,
+                       url=url,
+                       timeout=timeout,
+                       buffer=buffer,
+                       interval=interval)
     await task
 
 
 @click.command()
-@click.option('--port',
+@click.option('--port',
+              default=os.getenv('QULAB_RECORD_PORT', 6789),
+              help='Port of the server.')
+@click.option('--datapath', default=datapath, help='Path of the data.')
+@click.option('--url', default=None, help='URL of the database.')
 @click.option('--timeout', default=1, help='Timeout of ping.')
-
-
+@click.option('--buffer', default=1024, help='Buffer size (MB).')
+@click.option('--interval',
+              default=60,
+              help='Interval of flush cache, in unit of second.')
+def server(port, datapath, url, timeout, buffer, interval):
+    asyncio.run(main(port, Path(datapath), url, timeout, buffer, interval))
 
 
 if __name__ == "__main__":
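
Note: the server speaks a simple pickled-dict protocol over a ROUTER socket; each request carries a 'method' key and most handlers send back one pickled reply frame. Below is a synchronous sketch of the config_update/config_get round trip against a running record server (the async helpers added in qulab/scan/scan.py do the same thing); the address and sample config are placeholders.

# Store a config on the server, then fetch it back and decode it.
import lzma
import pickle
import zmq

ctx = zmq.Context.instance()
sock = ctx.socket(zmq.DEALER)
sock.connect('tcp://127.0.0.1:6789')     # placeholder; use your server's address

config = {'readout': {'power': -20}}     # illustrative payload
sock.send_pyobj({'method': 'config_update',
                 'update': lzma.compress(pickle.dumps(config))})
config_id = sock.recv_pyobj()            # integer id of the stored Config row

sock.send_pyobj({'method': 'config_get', 'config_id': config_id})
buf = sock.recv_pyobj()                  # raw bytes as stored on disk
assert pickle.loads(lzma.decompress(buf)) == config
sock.close()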
qulab/sys/rpc/zmq_socket.py
CHANGED

@@ -98,7 +98,8 @@ class ZMQContextManager:
                  public_keys_location: Optional[str] = None,
                  secret_key: Optional[bytes] = None,
                  public_key: Optional[bytes] = None,
-                 server_public_key: Optional[bytes] = None
+                 server_public_key: Optional[bytes] = None,
+                 socket: Optional[zmq.Socket] = None):
         self.socket_type = socket_type
         if bind is None and connect is None:
             raise ValueError("Either 'bind' or 'connect' must be specified.")
@@ -129,6 +130,7 @@ class ZMQContextManager:
         self.auth = None
         self.context = None
         self.socket = None
+        self._external_socket = socket
 
     def _create_socket(self, asyncio=False) -> zmq.Socket:
         """
@@ -138,6 +140,8 @@ class ZMQContextManager:
         Returns:
             zmq.Socket: The configured ZeroMQ socket.
         """
+        if self._external_socket:
+            return self._external_socket
         if asyncio:
             self.context = zmq.asyncio.Context()
         else:
@@ -185,6 +189,8 @@ class ZMQContextManager:
         Closes the ZeroMQ socket and the context, and stops the authenticator
         if it was started.
         """
+        if self._external_socket:
+            return
         if self.observer:
             self.observer.stop()
             self.observer.join()
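
Note: the new socket parameter lets callers hand an already-open socket to ZMQContextManager; _create_socket() then returns it unchanged and teardown is skipped, so one connection can be shared by several helpers (the new scan helpers pass socket= through for exactly this reason). A usage sketch, assuming a record server is listening on the placeholder address:

import zmq
from qulab.sys.rpc.zmq_socket import ZMQContextManager

with ZMQContextManager(zmq.DEALER, connect='tcp://127.0.0.1:6789') as sock:
    # A nested manager given socket=sock neither reconnects nor closes it on
    # exit; the outer manager stays in charge of the socket's lifetime.
    with ZMQContextManager(zmq.DEALER, connect='tcp://127.0.0.1:6789',
                           socket=sock) as same_sock:
        same_sock.send_pyobj({'method': 'ping'})
        print(same_sock.recv_pyobj())    # 'pong' from the record server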
qulab/version.py
CHANGED

@@ -1 +1 @@
-__version__ = "2.1.2"
+__version__ = "2.1.4"
qulab/scan/recorder.py
DELETED

@@ -1,247 +0,0 @@
-import asyncio
-import os
-import pickle
-import time
-from pathlib import Path
-
-import click
-import dill
-import zmq
-from loguru import logger
-
-from qulab.sys.rpc.zmq_socket import ZMQContextManager
-
-from .curd import query_record, remove_tags, tag, update_tags
-from .models import Record as RecordInDB
-from .models import Session, create_engine, create_tables, sessionmaker, utcnow
-from .record import Record
-
-try:
-    default_record_port = int(os.getenv('QULAB_RECORD_PORT', 6789))
-except:
-    default_record_port = 6789
-
-if os.getenv('QULAB_RECORD_PATH'):
-    datapath = Path(os.getenv('QULAB_RECORD_PATH'))
-else:
-    datapath = Path.home() / 'qulab' / 'data'
-datapath.mkdir(parents=True, exist_ok=True)
-
-record_cache = {}
-CACHE_SIZE = 1024
-
-
-class Request():
-    __slots__ = ['sock', 'identity', 'msg', 'method']
-
-    def __init__(self, sock, identity, msg):
-        self.sock = sock
-        self.identity = identity
-        self.msg = pickle.loads(msg)
-        self.method = self.msg.get('method', '')
-
-
-async def reply(req, resp):
-    await req.sock.send_multipart([req.identity, pickle.dumps(resp)])
-
-
-def clear_cache():
-    if len(record_cache) < CACHE_SIZE:
-        return
-
-    for k, (t, _) in zip(sorted(record_cache.items(), key=lambda x: x[1][0]),
-                         range(len(record_cache) - CACHE_SIZE)):
-        del record_cache[k]
-
-
-def flush_cache():
-    for k, (t, r) in record_cache.items():
-        r.flush()
-
-
-def get_local_record(session: Session, id: int, datapath: Path) -> Record:
-    record_in_db = session.get(RecordInDB, id)
-    record_in_db.atime = utcnow()
-    path = datapath / 'objects' / record_in_db.file
-    with open(path, 'rb') as f:
-        record = dill.load(f)
-    record.database = datapath
-    record._file = path
-    return record
-
-
-def get_record(session: Session, id: int, datapath: Path) -> Record:
-    if id not in record_cache:
-        record = get_local_record(session, id, datapath)
-    else:
-        record = record_cache[id][1]
-    clear_cache()
-    record_cache[id] = time.time(), record
-    return record
-
-
-def record_create(session: Session, description: dict, datapath: Path) -> int:
-    record = Record(None, datapath, description)
-    record_in_db = RecordInDB()
-    if 'app' in description:
-        record_in_db.app = description['app']
-    if 'tags' in description:
-        record_in_db.tags = [tag(session, t) for t in description['tags']]
-    record_in_db.file = '/'.join(record._file.parts[-4:])
-    record._file = datapath / 'objects' / record_in_db.file
-    session.add(record_in_db)
-    try:
-        session.commit()
-        record.id = record_in_db.id
-        clear_cache()
-        record_cache[record.id] = time.time(), record
-        return record.id
-    except:
-        session.rollback()
-        raise
-
-
-def record_append(session: Session, record_id: int, level: int, step: int,
-                  position: int, variables: dict, datapath: Path):
-    record = get_record(session, record_id, datapath)
-    record.append(level, step, position, variables)
-    try:
-        record_in_db = session.get(RecordInDB, record_id)
-        record_in_db.mtime = utcnow()
-        record_in_db.atime = utcnow()
-        session.commit()
-    except:
-        session.rollback()
-        raise
-
-
-@logger.catch
-async def handle(session: Session, request: Request, datapath: Path):
-
-    msg = request.msg
-
-    match request.method:
-        case 'ping':
-            await reply(request, 'pong')
-        case 'bufferlist_slice':
-            record = get_record(session, msg['record_id'], datapath)
-            bufferlist = record.get(msg['key'],
-                                    buffer_to_array=False,
-                                    slice=msg['slice'])
-            await reply(request, list(bufferlist.iter()))
-        case 'record_create':
-            description = dill.loads(msg['description'])
-            await reply(request, record_create(session, description, datapath))
-        case 'record_append':
-            record_append(session, msg['record_id'], msg['level'], msg['step'],
-                          msg['position'], msg['variables'], datapath)
-        case 'record_description':
-            record = get_record(session, msg['record_id'], datapath)
-            await reply(request, dill.dumps(record))
-        case 'record_getitem':
-            record = get_record(session, msg['record_id'], datapath)
-            await reply(request, record.get(msg['key'], buffer_to_array=False))
-        case 'record_keys':
-            record = get_record(session, msg['record_id'], datapath)
-            await reply(request, record.keys())
-        case 'record_query':
-            total, apps, table = query_record(session,
-                                              offset=msg.get('offset', 0),
-                                              limit=msg.get('limit', 10),
-                                              app=msg.get('app', None),
-                                              tags=msg.get('tags', ()),
-                                              before=msg.get('before', None),
-                                              after=msg.get('after', None))
-            await reply(request, (total, apps, table))
-        case 'record_get_tags':
-            record_in_db = session.get(RecordInDB, msg['record_id'])
-            await reply(request, [t.name for t in record_in_db.tags])
-        case 'record_remove_tags':
-            remove_tags(session, msg['record_id'], msg['tags'])
-        case 'record_add_tags':
-            update_tags(session, msg['record_id'], msg['tags'], True)
-        case 'record_replace_tags':
-            update_tags(session, msg['record_id'], msg['tags'], False)
-        case _:
-            logger.error(f"Unknown method: {msg['method']}")
-
-
-async def _handle(session: Session, request: Request, datapath: Path):
-    try:
-        await handle(session, request, datapath)
-    except:
-        await reply(request, 'error')
-
-
-async def serv(port,
-               datapath,
-               url=None,
-               buffer_size=1024 * 1024 * 1024,
-               interval=60):
-    logger.info('Server starting.')
-    async with ZMQContextManager(zmq.ROUTER, bind=f"tcp://*:{port}") as sock:
-        if url is None:
-            url = 'sqlite:///' + str(datapath / 'data.db')
-        engine = create_engine(url)
-        create_tables(engine)
-        Session = sessionmaker(engine)
-        with Session() as session:
-            logger.info('Server started.')
-            received = 0
-            last_flush_time = time.time()
-            while True:
-                identity, msg = await sock.recv_multipart()
-                received += len(msg)
-                req = Request(sock, identity, msg)
-                asyncio.create_task(_handle(session, req, datapath))
-                if received > buffer_size or time.time(
-                ) - last_flush_time > interval:
-                    flush_cache()
-                    received = 0
-                    last_flush_time = time.time()
-
-
-async def watch(port, datapath, url=None, timeout=1, buffer=1024, interval=60):
-    with ZMQContextManager(zmq.DEALER,
-                           connect=f"tcp://127.0.0.1:{port}") as sock:
-        sock.setsockopt(zmq.LINGER, 0)
-        while True:
-            try:
-                sock.send_pyobj({"method": "ping"})
-                if sock.poll(int(1000 * timeout)):
-                    sock.recv()
-                else:
-                    raise asyncio.TimeoutError()
-            except (zmq.error.ZMQError, asyncio.TimeoutError):
-                return asyncio.create_task(
-                    serv(port, datapath, url, buffer * 1024 * 1024, interval))
-            await asyncio.sleep(timeout)
-
-
-async def main(port, datapath, url, timeout=1, buffer=1024, interval=60):
-    task = await watch(port=port,
-                       datapath=datapath,
-                       url=url,
-                       timeout=timeout,
-                       buffer=buffer,
-                       interval=interval)
-    await task
-
-
-@click.command()
-@click.option('--port',
-              default=os.getenv('QULAB_RECORD_PORT', 6789),
-              help='Port of the server.')
-@click.option('--datapath', default=datapath, help='Path of the data.')
-@click.option('--url', default=None, help='URL of the database.')
-@click.option('--timeout', default=1, help='Timeout of ping.')
-@click.option('--buffer', default=1024, help='Buffer size (MB).')
-@click.option('--interval',
-              default=60,
-              help='Interval of flush cache, in unit of second.')
-def record(port, datapath, url, timeout, buffer, interval):
-    asyncio.run(main(port, Path(datapath), url, timeout, buffer, interval))
-
-
-if __name__ == "__main__":
-    record()
{QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/LICENSE
File without changes

{QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/WHEEL
File without changes

{QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/entry_points.txt
File without changes

{QuLab-2.1.2.dist-info → QuLab-2.1.4.dist-info}/top_level.txt
File without changes