QuLab 2.10.10__cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qulab/__init__.py +33 -0
- qulab/__main__.py +4 -0
- qulab/cli/__init__.py +0 -0
- qulab/cli/commands.py +30 -0
- qulab/cli/config.py +170 -0
- qulab/cli/decorators.py +28 -0
- qulab/dicttree.py +523 -0
- qulab/executor/__init__.py +5 -0
- qulab/executor/analyze.py +188 -0
- qulab/executor/cli.py +434 -0
- qulab/executor/load.py +563 -0
- qulab/executor/registry.py +185 -0
- qulab/executor/schedule.py +543 -0
- qulab/executor/storage.py +615 -0
- qulab/executor/template.py +259 -0
- qulab/executor/utils.py +194 -0
- qulab/expression.py +827 -0
- qulab/fun.cp313-win_amd64.pyd +0 -0
- qulab/monitor/__init__.py +1 -0
- qulab/monitor/__main__.py +8 -0
- qulab/monitor/config.py +41 -0
- qulab/monitor/dataset.py +77 -0
- qulab/monitor/event_queue.py +54 -0
- qulab/monitor/mainwindow.py +234 -0
- qulab/monitor/monitor.py +115 -0
- qulab/monitor/ploter.py +123 -0
- qulab/monitor/qt_compat.py +16 -0
- qulab/monitor/toolbar.py +265 -0
- qulab/scan/__init__.py +2 -0
- qulab/scan/curd.py +221 -0
- qulab/scan/models.py +554 -0
- qulab/scan/optimize.py +76 -0
- qulab/scan/query.py +387 -0
- qulab/scan/record.py +603 -0
- qulab/scan/scan.py +1166 -0
- qulab/scan/server.py +450 -0
- qulab/scan/space.py +213 -0
- qulab/scan/utils.py +234 -0
- qulab/storage/__init__.py +0 -0
- qulab/storage/__main__.py +51 -0
- qulab/storage/backend/__init__.py +0 -0
- qulab/storage/backend/redis.py +204 -0
- qulab/storage/base_dataset.py +352 -0
- qulab/storage/chunk.py +60 -0
- qulab/storage/dataset.py +127 -0
- qulab/storage/file.py +273 -0
- qulab/storage/models/__init__.py +22 -0
- qulab/storage/models/base.py +4 -0
- qulab/storage/models/config.py +28 -0
- qulab/storage/models/file.py +89 -0
- qulab/storage/models/ipy.py +58 -0
- qulab/storage/models/models.py +88 -0
- qulab/storage/models/record.py +161 -0
- qulab/storage/models/report.py +22 -0
- qulab/storage/models/tag.py +93 -0
- qulab/storage/storage.py +95 -0
- qulab/sys/__init__.py +2 -0
- qulab/sys/chat.py +688 -0
- qulab/sys/device/__init__.py +3 -0
- qulab/sys/device/basedevice.py +255 -0
- qulab/sys/device/loader.py +86 -0
- qulab/sys/device/utils.py +79 -0
- qulab/sys/drivers/FakeInstrument.py +68 -0
- qulab/sys/drivers/__init__.py +0 -0
- qulab/sys/ipy_events.py +125 -0
- qulab/sys/net/__init__.py +0 -0
- qulab/sys/net/bencoder.py +205 -0
- qulab/sys/net/cli.py +169 -0
- qulab/sys/net/dhcp.py +543 -0
- qulab/sys/net/dhcpd.py +176 -0
- qulab/sys/net/kad.py +1142 -0
- qulab/sys/net/kcp.py +192 -0
- qulab/sys/net/nginx.py +194 -0
- qulab/sys/progress.py +190 -0
- qulab/sys/rpc/__init__.py +0 -0
- qulab/sys/rpc/client.py +0 -0
- qulab/sys/rpc/exceptions.py +96 -0
- qulab/sys/rpc/msgpack.py +1052 -0
- qulab/sys/rpc/msgpack.pyi +41 -0
- qulab/sys/rpc/router.py +35 -0
- qulab/sys/rpc/rpc.py +412 -0
- qulab/sys/rpc/serialize.py +139 -0
- qulab/sys/rpc/server.py +29 -0
- qulab/sys/rpc/socket.py +29 -0
- qulab/sys/rpc/utils.py +25 -0
- qulab/sys/rpc/worker.py +0 -0
- qulab/sys/rpc/zmq_socket.py +227 -0
- qulab/tools/__init__.py +0 -0
- qulab/tools/connection_helper.py +39 -0
- qulab/typing.py +2 -0
- qulab/utils.py +95 -0
- qulab/version.py +1 -0
- qulab/visualization/__init__.py +188 -0
- qulab/visualization/__main__.py +71 -0
- qulab/visualization/_autoplot.py +464 -0
- qulab/visualization/plot_circ.py +319 -0
- qulab/visualization/plot_layout.py +408 -0
- qulab/visualization/plot_seq.py +242 -0
- qulab/visualization/qdat.py +152 -0
- qulab/visualization/rot3d.py +23 -0
- qulab/visualization/widgets.py +86 -0
- qulab-2.10.10.dist-info/METADATA +110 -0
- qulab-2.10.10.dist-info/RECORD +107 -0
- qulab-2.10.10.dist-info/WHEEL +5 -0
- qulab-2.10.10.dist-info/entry_points.txt +2 -0
- qulab-2.10.10.dist-info/licenses/LICENSE +21 -0
- qulab-2.10.10.dist-info/top_level.txt +1 -0
qulab/storage/file.py
ADDED
@@ -0,0 +1,273 @@
|
|
1
|
+
import lzma
|
2
|
+
import pathlib
|
3
|
+
|
4
|
+
import dill
|
5
|
+
import numpy as np
|
6
|
+
|
7
|
+
# Magic bytes identifying this custom wave-data file format.
MAGIC = b'wAvEDatA'

# Raw LZMA2 filter chain used for both the header info block and payloads.
lzma_filters = [{
    'id': lzma.FILTER_LZMA2,
    'preset': 9 | lzma.PRESET_EXTREME,
}]

# Payload format tags stored in the header's one-byte format field.
FORMAT_RAW = 1  # payload stored as-is
FORMAT_XZ = 2  # payload compressed with the raw LZMA2 filter chain above
|
16
|
+
|
17
|
+
|
18
|
+
def _get_header(file):
    """Read and validate the file header at the current stream position.

    Layout: 8-byte MAGIC, 1-byte version, 1-byte payload format, 2-byte
    big-endian size of the compressed info block, then the dill-pickled
    ``info`` dict compressed with the raw LZMA2 filter chain.

    Returns a dict with keys ``version``, ``format`` and ``info``.
    Raises AssertionError when the magic bytes do not match.
    """
    assert MAGIC == file.read(len(MAGIC)), 'Invalid file format'
    version = int.from_bytes(file.read(1), 'big', signed=False)
    file_format = int.from_bytes(file.read(1), 'big', signed=False)
    info_size = int.from_bytes(file.read(2), 'big', signed=False)
    raw_info = lzma.decompress(file.read(info_size),
                               format=lzma.FORMAT_RAW,
                               filters=lzma_filters)
    return {
        'version': version,
        'format': file_format,
        'info': dill.loads(raw_info),
    }
|
31
|
+
|
32
|
+
|
33
|
+
def _make_header(header: dict):
    """Serialize a header dict into the byte layout `_get_header` parses.

    Recognized keys (all optional): ``version`` (default 1), ``format``
    (default FORMAT_RAW) and ``info`` (default empty dict).

    Raises AssertionError if the compressed info block does not fit in
    the 16-bit size field.
    """
    info_blob = lzma.compress(dill.dumps(header.get('info', {})),
                              format=lzma.FORMAT_RAW,
                              filters=lzma_filters)
    assert len(info_blob) < 2**16, 'Header too large'
    version_byte = header.get('version', 1).to_bytes(1, 'big', signed=False)
    format_byte = header.get('format', 1).to_bytes(1, 'big', signed=False)
    size_bytes = len(info_blob).to_bytes(2, 'big', signed=False)
    return MAGIC + version_byte + format_byte + size_bytes + info_blob
|
45
|
+
|
46
|
+
|
47
|
+
class FakeLock():
    """A no-op lock supporting both sync and async ``with`` blocks.

    Used as the default when a file wrapper is created without a real
    lock, so the locking code paths need no special-casing.
    """

    def __enter__(self):
        return None

    def __exit__(self, *exc_info):
        return None

    async def __aenter__(self):
        return None

    async def __aexit__(self, *exc_info):
        return None
|
60
|
+
|
61
|
+
|
62
|
+
class BaseFile():
|
63
|
+
|
64
|
+
def __init__(self, path: str | pathlib.Path, lock=None):
|
65
|
+
self.path = path
|
66
|
+
self.lock = lock or FakeLock()
|
67
|
+
|
68
|
+
def compress(self):
|
69
|
+
with self.lock:
|
70
|
+
self._compress()
|
71
|
+
|
72
|
+
def decompress(self):
|
73
|
+
with self.lock:
|
74
|
+
self._decompress()
|
75
|
+
|
76
|
+
def _compress(self):
|
77
|
+
with open(self.path, 'rb+') as f:
|
78
|
+
try:
|
79
|
+
header = _get_header(f)
|
80
|
+
except:
|
81
|
+
return
|
82
|
+
if header.get('format', FORMAT_RAW) == FORMAT_XZ:
|
83
|
+
return
|
84
|
+
header['format'] = FORMAT_XZ
|
85
|
+
buffer = f.read()
|
86
|
+
f.seek(0)
|
87
|
+
f.write(_make_header(header))
|
88
|
+
f.write(
|
89
|
+
lzma.compress(buffer,
|
90
|
+
format=lzma.FORMAT_RAW,
|
91
|
+
filters=lzma_filters))
|
92
|
+
f.truncate()
|
93
|
+
|
94
|
+
def _decompress(self):
|
95
|
+
with open(self.path, 'rb+') as f:
|
96
|
+
try:
|
97
|
+
header = _get_header(f)
|
98
|
+
except:
|
99
|
+
return
|
100
|
+
if header.get('format', FORMAT_RAW) == FORMAT_RAW:
|
101
|
+
return
|
102
|
+
header['format'] = FORMAT_RAW
|
103
|
+
buffer = lzma.decompress(f.read(),
|
104
|
+
format=lzma.FORMAT_RAW,
|
105
|
+
filters=lzma_filters)
|
106
|
+
f.seek(0)
|
107
|
+
f.write(_make_header(header))
|
108
|
+
f.write(buffer)
|
109
|
+
f.truncate()
|
110
|
+
|
111
|
+
|
112
|
+
class BinaryFile(BaseFile):
    """A single blob of bytes stored after the standard header."""

    def read(self):
        """Return the payload, transparently decompressing XZ files."""
        with self.lock:
            with open(self.path, 'rb') as f:
                header = _get_header(f)
                payload = f.read()
            if header.get('format', FORMAT_RAW) == FORMAT_XZ:
                payload = lzma.decompress(payload,
                                          format=lzma.FORMAT_RAW,
                                          filters=lzma_filters)
            return payload

    def write(self, data: bytes):
        """Replace the file content with *data*, stored uncompressed."""
        prefix = _make_header({
            'format': FORMAT_RAW,
            'info': {
                'type': 'bytes'
            }
        })
        with self.lock:
            with open(self.path, 'wb') as f:
                f.write(prefix)
                f.write(data)
|
136
|
+
|
137
|
+
|
138
|
+
class ObjectFile(BinaryFile):
    """A single pickled Python object stored after the standard header."""

    def load(self):
        """Unpickle and return the stored object."""
        with self.lock:
            with open(self.path, 'rb') as f:
                header = _get_header(f)
                if header.get('format', FORMAT_RAW) != FORMAT_XZ:
                    # Raw payload: stream the pickle straight from disk.
                    return dill.load(f)
                blob = lzma.decompress(f.read(),
                                       format=lzma.FORMAT_RAW,
                                       filters=lzma_filters)
            return dill.loads(blob)

    def dump(self, obj):
        """Pickle *obj* into the file, replacing any previous content."""
        prefix = _make_header({
            'format': FORMAT_RAW,
            'info': {
                'type': 'pickle'
            }
        })
        with self.lock:
            with open(self.path, 'wb') as f:
                f.write(prefix)
                dill.dump(obj, f)
|
163
|
+
|
164
|
+
|
165
|
+
class ObjectListFile(ObjectFile):
    """An append-only sequence of pickled objects in a single file.

    Records are written back-to-back as dill pickles after one file
    header; iteration replays them in insertion order.
    """

    def clear(self):
        """Reset the file to an empty list (header only, RAW format)."""
        with self.lock:
            # Decompress first so the freshly written RAW header is
            # consistent with the (now empty) payload.
            self._decompress()
            with open(self.path, 'wb') as f:
                f.write(
                    _make_header({
                        'format': FORMAT_RAW,
                        'info': {
                            'type': 'pickle_list'
                        }
                    }))

    def append(self, obj):
        """Append one object as a dill pickle at the end of the file.

        NOTE(review): unlike ``clear``, this does not decompress first;
        appending to an XZ-format file would place a raw pickle after
        the compressed payload — presumably callers decompress before
        appending; confirm.
        """
        with self.lock:
            with open(self.path, 'ab') as f:
                # A brand-new (empty) file needs its header written first.
                if f.tell() == 0:
                    f.write(
                        _make_header({
                            'format': FORMAT_RAW,
                            'info': {
                                'type': 'pickle_list'
                            }
                        }))
                dill.dump(obj, f)

    def __iter__(self):
        """Yield the stored objects in write order.

        A header is read at the top of each pass; once the read position
        reaches EOF the MAGIC check fails with AssertionError, which
        terminates the loop.
        """
        with self.lock:
            with open(self.path, 'rb') as f:
                while True:
                    try:
                        header = _get_header(f)
                    except AssertionError:
                        break
                    if header.get('format', FORMAT_RAW) == FORMAT_XZ:
                        yield from self._compressed_iter(f)
                    else:
                        yield from self._iter(f)

    def _compressed_iter(self, f):
        # Wrap the remaining bytes in a streaming LZMA reader and
        # iterate the pickles inside it.
        with lzma.open(f, 'rb', format=lzma.FORMAT_RAW,
                       filters=lzma_filters) as f:
            yield from self._iter(f)

    def _iter(self, f):
        # Read consecutive dill pickles until the stream is exhausted.
        while True:
            try:
                yield dill.load(f)
            except EOFError:
                break

    def asarray(self):
        """Return all stored objects collected into a numpy array."""
        return np.array(list(self))
|
219
|
+
|
220
|
+
|
221
|
+
class ArrayFile(BaseFile):
    """A flat numeric array persisted as raw bytes after a header.

    The element dtype is recorded in the header's ``info`` mapping when
    the first chunk of data is written.
    """

    def clear(self):
        """Truncate the file to zero bytes (no header is written)."""
        with self.lock:
            with open(self.path, 'wb') as f:
                f.write(b'')

    def extend(self, data: np.ndarray):
        """Append the raw bytes of *data*; write a header if the file is new."""
        with self.lock:
            # Payload bytes can only be appended to an uncompressed file.
            self._decompress()
            with open(self.path, 'ab') as f:
                is_new_file = f.tell() == 0
                if is_new_file:
                    prefix = _make_header({
                        'format': FORMAT_RAW,
                        'info': {
                            'type': 'array',
                            'dtype': data.dtype
                        }
                    })
                    f.write(prefix)
                f.write(data.tobytes())

    def append(self, data, dtype=float):
        """Convert *data* to an ndarray of *dtype* and append it."""
        self.extend(np.asarray(data, dtype=dtype))

    def asarray(self):
        """Read the whole file back as an array of the stored dtype."""
        with self.lock:
            with open(self.path, 'rb') as f:
                header = _get_header(f)
                dtype = header['info']['dtype']
                if header.get('format', FORMAT_RAW) != FORMAT_XZ:
                    return np.fromfile(f, dtype=dtype)
                payload = lzma.decompress(f.read(),
                                          format=lzma.FORMAT_RAW,
                                          filters=lzma_filters)
                return np.frombuffer(payload, dtype=dtype)
|
259
|
+
|
260
|
+
|
261
|
+
def load(path):
    """Open *path* and return the wrapper class matching its stored type.

    Raises ValueError for an unrecognized type tag in the header.
    """
    with open(path, 'rb') as f:
        header = _get_header(f)
    wrappers = {
        'array': ArrayFile,
        'pickle': ObjectFile,
        'pickle_list': ObjectListFile,
        'bytes': BinaryFile,
    }
    file_type = header['info']['type']
    if file_type not in wrappers:
        raise ValueError(f'Unknown file type: {header["info"]["type"]}')
    return wrappers[file_type](path)
|
@@ -0,0 +1,22 @@
|
|
1
|
+
from sqlalchemy import Engine
|
2
|
+
from sqlalchemy.orm import Session
|
3
|
+
|
4
|
+
from .base import Base
|
5
|
+
from .file import File, FileChunk
|
6
|
+
from .ipy import Cell, InputText, Notebook
|
7
|
+
from .record import Record
|
8
|
+
from .tag import Tag, has_tags
|
9
|
+
|
10
|
+
|
11
|
+
def create_tables(url: str):
    """Create every table declared on ``Base`` in the database at *url*."""
    from sqlalchemy import create_engine
    Base.metadata.create_all(create_engine(url))
|
15
|
+
|
16
|
+
|
17
|
+
def create_session(url: str) -> Session:
    """Return a fresh ORM session bound to the database at *url*."""
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    factory = sessionmaker(bind=create_engine(url))
    return factory()
|
@@ -0,0 +1,28 @@
|
|
1
|
+
import dill
|
2
|
+
from sqlalchemy import Column, ForeignKey, Integer, String
|
3
|
+
from sqlalchemy.orm import relationship
|
4
|
+
|
5
|
+
from .base import Base
|
6
|
+
from .file import File
|
7
|
+
from .tag import Tag, has_tags
|
8
|
+
|
9
|
+
|
10
|
+
@has_tags
class Config(Base):
    """A named configuration whose payload lives in a linked ``File`` row."""
    __tablename__ = 'configs'

    id = Column(Integer, primary_key=True)
    name = Column(String)
    file_id = Column(Integer, ForeignKey('files.id'))
    # NOTE(review): meta_id has no matching relationship and is not used
    # in this class — confirm against callers before removing.
    meta_id = Column(Integer, ForeignKey('files.id'))

    # Two FKs point at files.id, so the relationship must name its key.
    file = relationship("File", foreign_keys=[file_id])

    @property
    def data(self) -> dict:
        # Payload is stored as a dill pickle inside the linked file.
        result = dill.loads(self.file.read())
        return result

    @data.setter
    def data(self, data: dict):
        self.file.write(dill.dumps(data))
|
@@ -0,0 +1,89 @@
|
|
1
|
+
import zlib
|
2
|
+
from datetime import datetime
|
3
|
+
|
4
|
+
from sqlalchemy import (Boolean, Column, DateTime, ForeignKey, Integer, String,
|
5
|
+
event)
|
6
|
+
from sqlalchemy.orm import relationship
|
7
|
+
from sqlalchemy.orm.session import Session
|
8
|
+
|
9
|
+
from ..chunk import CHUNKSIZE, delete_chunk, load_chunk, save_chunk
|
10
|
+
from . import Base
|
11
|
+
|
12
|
+
|
13
|
+
class FileChunk(Base):
    """One piece of a ``File``'s content.

    The actual bytes live outside the database (see ``..chunk`` helpers);
    only the content hash, size and compression flag are stored here.
    """
    __tablename__ = 'file_chunks'

    id = Column(Integer, primary_key=True)
    file_id = Column(Integer, ForeignKey('files.id'))
    index = Column(Integer)  # position of this chunk within the file
    size = Column(Integer)  # stored size in bytes
    chunk_hash = Column(String)  # content-addressed key used by load_chunk
    compressed = Column(Boolean, default=False)  # True once zlib-compressed

    file = relationship("File", back_populates="chunks")

    @property
    def chunk(self):
        # Pending in-memory data (not yet flushed) takes precedence over
        # the external chunk store.
        if hasattr(self, '_chunk'):
            return self._chunk
        return load_chunk(self.chunk_hash, self.compressed)

    @chunk.setter
    def chunk(self, data):
        # Keep the data in memory; the before_insert event persists it.
        self._chunk = data
|
34
|
+
|
35
|
+
|
36
|
+
@event.listens_for(FileChunk, 'before_insert')
def before_insert_file_chunk(mapper, connection, target: FileChunk):
    """Persist the in-memory chunk bytes just before the row is inserted."""
    target.chunk_hash, target.size = save_chunk(target._chunk)
|
39
|
+
|
40
|
+
|
41
|
+
class File(Base):
    """A file split into content-addressed chunks of CHUNKSIZE bytes."""
    __tablename__ = 'files'

    id = Column(Integer, primary_key=True)
    ctime = Column(DateTime, default=datetime.utcnow)  # creation time (UTC)
    mtime = Column(DateTime, default=datetime.utcnow)  # last write time (UTC)
    atime = Column(DateTime, default=datetime.utcnow)  # last read time (UTC)
    name = Column(String)
    size = Column(Integer)  # total payload size in bytes
    chunks = relationship("FileChunk", order_by=FileChunk.index)

    def write(self, data):
        """Replace the file content with *data*, re-chunking it.

        NOTE(review): the previous FileChunk rows are only detached here,
        not deleted — confirm whether orphan cleanup happens elsewhere.
        """
        self.mtime = datetime.utcnow()
        self.size = len(data)
        self.chunks = []
        for i in range(0, len(data), CHUNKSIZE):
            chunk = FileChunk()
            chunk.index = i // CHUNKSIZE
            chunk._chunk = data[i:i + CHUNKSIZE]
            chunk.size = len(chunk._chunk)
            self.chunks.append(chunk)

    def read(self):
        """Return the whole payload by concatenating chunks in order."""
        self.atime = datetime.utcnow()
        return b''.join([c.chunk for c in self.chunks])
|
66
|
+
|
67
|
+
|
68
|
+
@event.listens_for(File, 'before_delete')
def before_delete_file(mapper, connection, target):
    """Delete this file's chunk rows before the file row itself is removed."""
    for chunk in target.chunks:
        connection.execute(
            FileChunk.__table__.delete().where(FileChunk.id == chunk.id))
|
73
|
+
|
74
|
+
|
75
|
+
def compress_chunks(db: Session):
    """Compress up to 100 not-yet-compressed chunks in the external store.

    For each chunk: write a zlib-compressed copy under a new hash,
    point the row at it, commit, then drop the old copy. On commit
    failure the row is rolled back and the new copy removed.
    """
    for chunk in db.query(FileChunk).filter(
            FileChunk.compressed == False).limit(100):  # noqa: E712 — SQLAlchemy needs `==`
        old_chunk_hash = chunk.chunk_hash
        buf = zlib.compress(chunk.chunk)
        chunk_hash, size = save_chunk(buf, compressed=True)
        chunk.chunk_hash = chunk_hash
        chunk.size = size
        chunk.compressed = True
        try:
            db.commit()
        except Exception:
            # Was a bare ``except:``; narrowed so SystemExit /
            # KeyboardInterrupt are not swallowed.
            db.rollback()
            delete_chunk(chunk_hash)
        else:
            # Only delete the old copy once the commit has succeeded;
            # previously a failure in delete_chunk(old_chunk_hash) would
            # wrongly roll back an already-committed transaction and
            # delete the new chunk.
            delete_chunk(old_chunk_hash)
|
@@ -0,0 +1,58 @@
|
|
1
|
+
import hashlib
|
2
|
+
from datetime import datetime
|
3
|
+
|
4
|
+
from sqlalchemy import (Column, DateTime, ForeignKey, Integer, LargeBinary,
|
5
|
+
String, Text)
|
6
|
+
from sqlalchemy.orm import relationship
|
7
|
+
|
8
|
+
from .base import Base
|
9
|
+
|
10
|
+
|
11
|
+
class InputText(Base):
    """A deduplicated piece of input source text with its SHA-1 digest."""
    __tablename__ = 'inputs'

    id = Column(Integer, primary_key=True)
    hash = Column(LargeBinary(20))  # SHA-1 digest of the UTF-8 encoded text
    text_field = Column(Text, unique=True)

    @property
    def text(self):
        return self.text_field

    @text.setter
    def text(self, text):
        # Keep the digest in sync with the text so lookups can use it.
        self.hash = hashlib.sha1(text.encode('utf-8')).digest()
        self.text_field = text

    def __repr__(self) -> str:
        # NOTE(review): returns None (raising TypeError) when text was
        # never set — confirm instances are always created with text.
        return self.text
|
29
|
+
|
30
|
+
|
31
|
+
class Cell(Base):
    """One executed cell of a Notebook, linked to its input text."""
    __tablename__ = 'cells'

    id = Column(Integer, primary_key=True)
    notebook_id = Column(Integer, ForeignKey("notebooks.id"))
    index = Column(Integer)  # position of the cell within its notebook
    ctime = Column(DateTime, default=datetime.utcnow)  # creation time (UTC)
    # presumably the finish/completion time of the cell — TODO confirm
    ftime = Column(DateTime, default=datetime.utcnow)
    input_id = Column(Integer, ForeignKey("inputs.id"))

    notebook = relationship("Notebook", back_populates="cells")
    input = relationship("InputText")

    def __repr__(self) -> str:
        return f"Cell(index={self.index}, input='{self.input}')"
|
46
|
+
|
47
|
+
|
48
|
+
class Notebook(Base):
    """An interactive session owning an ordered list of cells."""
    __tablename__ = 'notebooks'

    id = Column(Integer, primary_key=True)
    name = Column(String)
    ctime = Column(DateTime, default=datetime.utcnow)  # creation time (UTC)
    atime = Column(DateTime, default=datetime.utcnow)  # last access time (UTC)

    # Cells come back sorted by their execution index.
    cells = relationship("Cell",
                         order_by=Cell.index,
                         back_populates="notebook")
|
@@ -0,0 +1,88 @@
|
|
1
|
+
from datetime import datetime
|
2
|
+
|
3
|
+
from sqlalchemy import (Column, DateTime, Float, ForeignKey, Integer, String,
|
4
|
+
Table, create_engine)
|
5
|
+
from sqlalchemy.ext.declarative import declarative_base
|
6
|
+
from sqlalchemy.orm import relationship, sessionmaker
|
7
|
+
|
8
|
+
# Declarative base shared by every model in this module.
Base = declarative_base()

# Association table for the many-to-many link between datasets and tags.
dataset_tag_association = Table(
    'dataset_tag', Base.metadata,
    Column('dataset_id', Integer, ForeignKey('datasets.id'), primary_key=True),
    Column('tag_id', Integer, ForeignKey('tags.id'), primary_key=True))
|
14
|
+
|
15
|
+
|
16
|
+
class Experiment(Base):
    """An experiment run by a researcher, grouping several datasets."""
    __tablename__ = 'experiments'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    description = Column(String)
    start_date = Column(DateTime, default=datetime.utcnow)
    end_date = Column(DateTime, default=datetime.utcnow)
    researcher_id = Column(Integer, ForeignKey('researchers.id'))
    datasets = relationship('Dataset', back_populates='experiment')
    # Added: Researcher.experiments declares back_populates='researcher',
    # so this side of the relationship must exist or SQLAlchemy raises
    # an error when configuring the mappers.
    researcher = relationship('Researcher', back_populates='experiments')
|
25
|
+
|
26
|
+
|
27
|
+
class Researcher(Base):
    """A person who runs experiments."""
    __tablename__ = 'researchers'
    id = Column(Integer, primary_key=True)
    first_name = Column(String)
    last_name = Column(String)
    email = Column(String)
    affiliation = Column(String)
    experiments = relationship('Experiment', back_populates='researcher')
|
35
|
+
|
36
|
+
|
37
|
+
class Dataset(Base):
    """A named collection of data entries produced by an experiment."""
    __tablename__ = 'datasets'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    description = Column(String)
    # Many-to-many with Tag through the association table.
    tags = relationship('Tag',
                        secondary=dataset_tag_association,
                        back_populates='datasets')
    creation_date = Column(DateTime, default=datetime.utcnow)
    modification_date = Column(DateTime, default=datetime.utcnow)
    experiment_id = Column(Integer, ForeignKey('experiments.id'))
    experiment = relationship('Experiment', back_populates='datasets')
    data_entries = relationship('DataEntry', back_populates='dataset')
|
50
|
+
|
51
|
+
|
52
|
+
class DataEntry(Base):
    """One timestamped measurement value belonging to a dataset."""
    __tablename__ = 'data_entries'
    id = Column(Integer, primary_key=True)
    timestamp = Column(DateTime)
    value = Column(Float)
    units = Column(String)  # unit string for `value`
    variable_name = Column(String)  # name of the measured variable
    dataset_id = Column(Integer, ForeignKey('datasets.id'))
    dataset = relationship('Dataset', back_populates='data_entries')
|
61
|
+
|
62
|
+
|
63
|
+
class Variable(Base):
    """A described measurement variable.

    NOTE(review): not referenced by any relationship in this module —
    presumably matched to DataEntry.variable_name by name; confirm.
    """
    __tablename__ = 'variables'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    description = Column(String)
    data_type = Column(String)
|
69
|
+
|
70
|
+
|
71
|
+
class Tag(Base):
    """A unique label attachable to any number of datasets."""
    __tablename__ = 'tags'
    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)
    datasets = relationship('Dataset',
                            secondary=dataset_tag_association,
                            back_populates='tags')
|
78
|
+
|
79
|
+
|
80
|
+
# NOTE(review): the engine, schema creation and session below run as
# import-time side effects of this module — confirm this is intended
# rather than being wrapped in a main()/factory function.

# Create the database engine.
engine = create_engine('sqlite:///experiment_data.db')

# Create the tables.
Base.metadata.create_all(engine)

# Create a session.
Session = sessionmaker(bind=engine)
session = Session()
|