QuLab 2.4.0__cp312-cp312-macosx_10_13_universal2.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97)
  1. QuLab-2.4.0.dist-info/LICENSE +21 -0
  2. QuLab-2.4.0.dist-info/METADATA +105 -0
  3. QuLab-2.4.0.dist-info/RECORD +97 -0
  4. QuLab-2.4.0.dist-info/WHEEL +5 -0
  5. QuLab-2.4.0.dist-info/entry_points.txt +2 -0
  6. QuLab-2.4.0.dist-info/top_level.txt +1 -0
  7. qulab/__init__.py +3 -0
  8. qulab/__main__.py +30 -0
  9. qulab/dicttree.py +511 -0
  10. qulab/executor/__init__.py +5 -0
  11. qulab/executor/__main__.py +89 -0
  12. qulab/executor/load.py +202 -0
  13. qulab/executor/schedule.py +223 -0
  14. qulab/executor/storage.py +143 -0
  15. qulab/executor/transform.py +90 -0
  16. qulab/executor/utils.py +107 -0
  17. qulab/fun.cpython-312-darwin.so +0 -0
  18. qulab/monitor/__init__.py +1 -0
  19. qulab/monitor/__main__.py +8 -0
  20. qulab/monitor/config.py +41 -0
  21. qulab/monitor/dataset.py +77 -0
  22. qulab/monitor/event_queue.py +54 -0
  23. qulab/monitor/mainwindow.py +234 -0
  24. qulab/monitor/monitor.py +93 -0
  25. qulab/monitor/ploter.py +123 -0
  26. qulab/monitor/qt_compat.py +16 -0
  27. qulab/monitor/toolbar.py +265 -0
  28. qulab/scan/__init__.py +3 -0
  29. qulab/scan/curd.py +221 -0
  30. qulab/scan/expression.py +646 -0
  31. qulab/scan/models.py +554 -0
  32. qulab/scan/optimize.py +76 -0
  33. qulab/scan/query.py +374 -0
  34. qulab/scan/record.py +603 -0
  35. qulab/scan/scan.py +1166 -0
  36. qulab/scan/server.py +533 -0
  37. qulab/scan/space.py +213 -0
  38. qulab/scan/utils.py +229 -0
  39. qulab/storage/__init__.py +0 -0
  40. qulab/storage/__main__.py +51 -0
  41. qulab/storage/backend/__init__.py +0 -0
  42. qulab/storage/backend/redis.py +204 -0
  43. qulab/storage/base_dataset.py +352 -0
  44. qulab/storage/chunk.py +60 -0
  45. qulab/storage/dataset.py +127 -0
  46. qulab/storage/file.py +273 -0
  47. qulab/storage/models/__init__.py +22 -0
  48. qulab/storage/models/base.py +4 -0
  49. qulab/storage/models/config.py +28 -0
  50. qulab/storage/models/file.py +89 -0
  51. qulab/storage/models/ipy.py +58 -0
  52. qulab/storage/models/models.py +88 -0
  53. qulab/storage/models/record.py +161 -0
  54. qulab/storage/models/report.py +22 -0
  55. qulab/storage/models/tag.py +93 -0
  56. qulab/storage/storage.py +95 -0
  57. qulab/sys/__init__.py +0 -0
  58. qulab/sys/chat.py +688 -0
  59. qulab/sys/device/__init__.py +3 -0
  60. qulab/sys/device/basedevice.py +229 -0
  61. qulab/sys/device/loader.py +86 -0
  62. qulab/sys/device/utils.py +79 -0
  63. qulab/sys/drivers/FakeInstrument.py +52 -0
  64. qulab/sys/drivers/__init__.py +0 -0
  65. qulab/sys/ipy_events.py +125 -0
  66. qulab/sys/net/__init__.py +0 -0
  67. qulab/sys/net/bencoder.py +205 -0
  68. qulab/sys/net/cli.py +169 -0
  69. qulab/sys/net/dhcp.py +543 -0
  70. qulab/sys/net/dhcpd.py +176 -0
  71. qulab/sys/net/kad.py +1142 -0
  72. qulab/sys/net/kcp.py +192 -0
  73. qulab/sys/net/nginx.py +192 -0
  74. qulab/sys/progress.py +190 -0
  75. qulab/sys/rpc/__init__.py +0 -0
  76. qulab/sys/rpc/client.py +0 -0
  77. qulab/sys/rpc/exceptions.py +96 -0
  78. qulab/sys/rpc/msgpack.py +1052 -0
  79. qulab/sys/rpc/msgpack.pyi +41 -0
  80. qulab/sys/rpc/router.py +35 -0
  81. qulab/sys/rpc/rpc.py +412 -0
  82. qulab/sys/rpc/serialize.py +139 -0
  83. qulab/sys/rpc/server.py +29 -0
  84. qulab/sys/rpc/socket.py +29 -0
  85. qulab/sys/rpc/utils.py +25 -0
  86. qulab/sys/rpc/worker.py +0 -0
  87. qulab/sys/rpc/zmq_socket.py +220 -0
  88. qulab/version.py +1 -0
  89. qulab/visualization/__init__.py +188 -0
  90. qulab/visualization/__main__.py +71 -0
  91. qulab/visualization/_autoplot.py +464 -0
  92. qulab/visualization/plot_circ.py +319 -0
  93. qulab/visualization/plot_layout.py +408 -0
  94. qulab/visualization/plot_seq.py +242 -0
  95. qulab/visualization/qdat.py +152 -0
  96. qulab/visualization/rot3d.py +23 -0
  97. qulab/visualization/widgets.py +86 -0
qulab/scan/server.py ADDED
@@ -0,0 +1,533 @@
1
+ import asyncio
2
+ import os
3
+ import pickle
4
+ import subprocess
5
+ import sys
6
+ import time
7
+ import uuid
8
+ from pathlib import Path
9
+
10
+ import click
11
+ import dill
12
+ import zmq
13
+ from loguru import logger
14
+
15
+ from qulab.sys.rpc.zmq_socket import ZMQContextManager
16
+
17
+ from .curd import (create_cell, create_config, create_notebook, get_config,
18
+ query_record, remove_tags, tag, update_tags)
19
+ from .models import Cell, Notebook
20
+ from .models import Record as RecordInDB
21
+ from .models import Session, create_engine, create_tables, sessionmaker, utcnow
22
+ from .record import BufferList, Record, random_path
23
+ from .utils import dump_dict, load_dict
24
+
25
# Default TCP port for the record server; override with QULAB_RECORD_PORT.
try:
    default_record_port = int(os.getenv('QULAB_RECORD_PORT', 6789))
except (TypeError, ValueError):
    # A malformed environment value falls back to the stock port instead of
    # crashing at import time.  (Was a bare `except:`.)
    default_record_port = 6789

# Root directory for record data; override with QULAB_RECORD_PATH.
_record_path = os.getenv('QULAB_RECORD_PATH')
if _record_path:
    datapath = Path(_record_path)
else:
    datapath = Path.home() / 'qulab' / 'data'
datapath.mkdir(parents=True, exist_ok=True)

# Per-process namespace used to derive iterator ids via uuid3 in `handle`.
namespace = uuid.uuid4()
# record id -> (last_access_time, Record); LRU cache bounded by CACHE_SIZE.
record_cache = {}
# iter_id (bytes) -> (last_access_time, iterator) for paged BufferList reads.
buffer_list_cache = {}
CACHE_SIZE = 1024

# task id -> [Scan task | record id, queried flag] for submitted scan tasks.
pool = {}
42
+
43
+
44
class Request():
    """One client request read from the ZMQ ROUTER socket.

    Keeps the socket and the client identity frame so a reply can be
    routed back, plus the unpickled message dict and its ``method`` field.
    """

    __slots__ = ['sock', 'identity', 'msg', 'method']

    def __init__(self, sock, identity, msg):
        self.sock = sock
        self.identity = identity
        decoded = pickle.loads(msg)
        self.msg = decoded
        # Missing 'method' yields '' so dispatch falls through to the
        # "unknown method" branch instead of raising.
        self.method = decoded.get('method', '')

    def __repr__(self):
        return "Request({})".format(self.method)
55
+
56
+
57
class Response():
    """Marker base class for server replies sent back to clients."""
59
+
60
+
61
class ErrorResponse(Response):
    """Reply carrying an error description string for a failed request."""

    def __init__(self, error):
        # `error` is the repr of the exception raised while handling.
        self.error = error
65
+
66
+
67
async def reply(req, resp):
    """Pickle *resp* and route it back to the client that sent *req*."""
    payload = pickle.dumps(resp)
    await req.sock.send_multipart([req.identity, payload])
69
+
70
+
71
def _trim_lru(cache: dict, limit: int) -> None:
    """Evict the least-recently-used entries of *cache* down to *limit*.

    Entries are ``key -> (timestamp, value)``; the oldest timestamps go first.
    """
    excess = len(cache) - limit
    if excess <= 0:
        return
    oldest = sorted(cache.items(), key=lambda item: item[1][0])[:excess]
    for key, _ in oldest:
        del cache[key]


def clear_cache():
    """Bound both LRU caches to CACHE_SIZE entries.

    No-op until ``record_cache`` reaches CACHE_SIZE (the original gate is
    kept: ``buffer_list_cache`` is only trimmed on the same occasions).
    The two previously duplicated eviction loops (with unused loop
    variables) are factored into ``_trim_lru``.
    """
    if len(record_cache) < CACHE_SIZE:
        return

    logger.debug(f"clear_cache record_cache: {len(record_cache)}")
    _trim_lru(record_cache, CACHE_SIZE)

    logger.debug(f"clear_cache buffer_list_cache: {len(buffer_list_cache)}")
    _trim_lru(buffer_list_cache, CACHE_SIZE)
    logger.debug(f"clear_cache done.")
87
+
88
+
89
def flush_cache():
    """Flush every cached record's pending data to disk."""
    logger.debug(f"flush_cache: {len(record_cache)}")
    # Keys are not needed here; each cached value is (timestamp, record).
    for _timestamp, cached_record in record_cache.values():
        cached_record.flush()
    logger.debug(f"flush_cache done.")
94
+
95
+
96
def get_local_record(session: Session, id: int, datapath: Path) -> Record:
    """Load a record from disk by its database id, bypassing the cache.

    Returns ``None`` when no row with *id* exists.  Zip-packed records are
    loaded via ``Record.load``; otherwise the object file is unpickled
    with dill and re-bound to this server's data path.
    """
    logger.debug(f"get_local_record: {id}")
    record_in_db = session.get(RecordInDB, id)
    if record_in_db is None:
        logger.debug(f"record not found: {id=}")
        return None
    # Touch the access time; the surrounding handlers commit the session.
    record_in_db.atime = utcnow()

    path = datapath / 'objects' / record_in_db.file
    if record_in_db.file.endswith('.zip'):
        logger.debug(f"load record from zip: {record_in_db.file}")
        record = Record.load(path)
        logger.debug(f"load record from zip done.")
        return record

    with open(path, 'rb') as f:
        logger.debug(f"load record from file: {path}")
        record = dill.load(f)
        logger.debug(f"load record from file done.")
    # Re-point the unpickled record at this server's storage location.
    record.database = datapath
    record._file = path
    return record
118
+
119
+
120
def get_record(session: Session, id: int, datapath: Path) -> Record:
    """Fetch a record by id, preferring the in-memory LRU cache.

    On a miss the record is loaded from local storage; either way its
    cache timestamp is refreshed and the cache is trimmed.
    """
    cached = record_cache.get(id)
    if cached is not None:
        logger.debug(f"get_record from cache: {id=}")
        record = cached[1]
    else:
        record = get_local_record(session, id, datapath)
    clear_cache()
    logger.debug(f"update lru time for record cache: {id=}")
    record_cache[id] = time.time(), record
    return record
130
+
131
+
132
def record_create(session: Session, description: dict, datapath: Path) -> int:
    """Create a new record and its database row; return the new record id.

    Rolls back and re-raises on any commit failure.  On success the record
    is inserted into the in-memory cache.
    """
    # .get() instead of ['app']: 'app' is treated as optional below, so the
    # log line must not raise KeyError when it is absent.
    logger.debug(f"record_create: {description.get('app')}")
    record = Record(None, datapath, description)
    record_in_db = RecordInDB()
    if 'app' in description:
        record_in_db.app = description['app']
    if 'tags' in description:
        record_in_db.tags = [tag(session, t) for t in description['tags']]
    # Store the object path relative to datapath (last 4 path components).
    record_in_db.file = '/'.join(record._file.parts[-4:])
    record_in_db.config_id = description['config']
    record._file = datapath / 'objects' / record_in_db.file
    logger.debug(f"record_create generate random file: {record_in_db.file}")
    session.add(record_in_db)
    try:
        session.commit()
        logger.debug(f"record_create committed: record.id={record_in_db.id}")
        record.id = record_in_db.id
        clear_cache()
        record_cache[record.id] = time.time(), record
        return record.id
    except BaseException:
        # Explicit BaseException keeps the original bare-except semantics:
        # roll back even on KeyboardInterrupt, then re-raise.
        logger.debug(f"record_create rollback")
        session.rollback()
        raise
156
+
157
+
158
def record_append(session: Session, record_id: int, level: int, step: int,
                  position: int, variables: dict, datapath: Path):
    """Append one step of data to a record and touch its DB timestamps.

    The record itself is fetched through the LRU cache and mutated in
    memory via ``record.append``; only the ``mtime``/``atime`` columns are
    written to SQL here.  On commit failure the session is rolled back and
    the exception re-raised.
    """
    logger.debug(f"record_append: {record_id}")
    record = get_record(session, record_id, datapath)
    logger.debug(f"record_append: {record_id}, {level}, {step}, {position}")
    record.append(level, step, position, variables)
    logger.debug(f"record_append done.")
    try:
        logger.debug(f"record_append update SQL database.")
        record_in_db = session.get(RecordInDB, record_id)
        logger.debug(f"record_append get RecordInDB: {record_in_db}")
        # Update both modified and accessed timestamps on every append.
        record_in_db.mtime = utcnow()
        record_in_db.atime = utcnow()
        logger.debug(f"record_append update RecordInDB: {record_in_db}")
        session.commit()
        logger.debug(f"record_append commited.")
    except:
        # Bare except is deliberate here: always roll back, then re-raise.
        logger.debug(f"record_append rollback.")
        session.rollback()
        raise
178
+
179
+
180
def record_delete(session: Session, record_id: int, datapath: Path):
    """Delete a record's on-disk data and its database row.

    Previously an unknown *record_id* crashed with AttributeError because
    ``get_local_record`` returns ``None``; now it is a no-op.  The record
    is also dropped from the in-memory cache so later ``get_record`` calls
    cannot resurrect a deleted record from a stale entry.
    """
    record = get_local_record(session, record_id, datapath)
    if record is None:
        logger.debug(f"record_delete: record not found: {record_id=}")
        return
    record.delete()
    # Evict any cached copy of the deleted record.
    record_cache.pop(record_id, None)
    record_in_db = session.get(RecordInDB, record_id)
    if record_in_db is not None:
        session.delete(record_in_db)
    session.commit()
186
+
187
+
188
@logger.catch(reraise=True)
async def handle(session: Session, request: Request, datapath: Path):
    """Dispatch one client request to the matching server operation.

    The request's ``method`` field selects the action; results go back via
    :func:`reply`.  Note that several mutating methods (``record_append``,
    ``record_remove_tags``, ``record_add_tags``, ``record_replace_tags``,
    ``bufferlist_iter_exit``) send no reply at all — presumably the client
    treats them as fire-and-forget (TODO confirm against the client code).
    """

    msg = request.msg

    # 'ping' arrives constantly from the watchdog; keep it out of the log.
    if request.method not in ['ping']:
        logger.debug(f"handle: {request.method}")

    match request.method:
        case 'ping':
            await reply(request, 'pong')
        case 'bufferlist_iter':
            # Paged iteration over a record's BufferList.  A client passes
            # back a previously returned iter_id to resume; otherwise a new
            # iterator is built and fast-forwarded past msg['start'] items.
            logger.debug(f"bufferlist_iter: {msg}")
            if msg['iter_id'] and msg['iter_id'] in buffer_list_cache:
                it = buffer_list_cache[msg['iter_id']][1]
                iter_id = msg['iter_id']
            else:
                iter_id = uuid.uuid3(namespace, str(time.time_ns())).bytes
                record = get_record(session, msg['record_id'], datapath)
                bufferlist = record.get(msg['key'], buffer_to_array=False)
                if msg['slice']:
                    bufferlist._slice = msg['slice']
                it = bufferlist.iter()
                # Skip items the client has already received.
                for _, _ in zip(range(msg['start']), it):
                    pass
            current_time = time.time()
            ret, end = [], False
            # Collect items for at most ~20 ms per request so one client
            # cannot monopolize the event loop.
            while time.time() - current_time < 0.02:
                try:
                    ret.append(next(it))
                except StopIteration:
                    end = True
                    break
            logger.debug(f"bufferlist_iter: {iter_id}, {end}")
            await reply(request, (iter_id, ret, end))
            logger.debug(f"reply bufferlist_iter: {iter_id}, {end}")
            buffer_list_cache[iter_id] = time.time(), it
            clear_cache()
        case 'bufferlist_iter_exit':
            # Client is done with a paged iteration; drop the cached
            # iterator (best effort — an unknown iter_id is ignored).
            logger.debug(f"bufferlist_iter_exit: {msg}")
            try:
                it = buffer_list_cache.pop(msg['iter_id'])[1]
                it.throw(Exception)
            except:
                pass
            clear_cache()
            logger.debug(f"end bufferlist_iter_exit: {msg}")
        case 'record_create':
            logger.debug(f"record_create")
            description = load_dict(msg['description'])
            await reply(request, record_create(session, description, datapath))
            logger.debug(f"reply record_create")
        case 'record_append':
            # NOTE(review): no reply is sent for this method despite the
            # "reply record_append" log line below — confirm the client
            # does not wait for one.
            logger.debug(f"record_append")
            record_append(session, msg['record_id'], msg['level'], msg['step'],
                          msg['position'], msg['variables'], datapath)
            logger.debug(f"reply record_append")
        case 'record_description':
            record = get_record(session, msg['record_id'], datapath)
            await reply(request, dill.dumps(record))
        case 'record_getitem':
            record = get_record(session, msg['record_id'], datapath)
            await reply(request, record.get(msg['key'], buffer_to_array=False))
        case 'record_keys':
            record = get_record(session, msg['record_id'], datapath)
            await reply(request, record.keys())
        case 'record_query':
            total, apps, table = query_record(session,
                                              offset=msg.get('offset', 0),
                                              limit=msg.get('limit', 10),
                                              app=msg.get('app', None),
                                              tags=msg.get('tags', ()),
                                              before=msg.get('before', None),
                                              after=msg.get('after', None))
            await reply(request, (total, apps, table))
        case 'record_get_tags':
            record_in_db = session.get(RecordInDB, msg['record_id'])
            await reply(request, [t.name for t in record_in_db.tags])
        case 'record_remove_tags':
            remove_tags(session, msg['record_id'], msg['tags'])
        case 'record_add_tags':
            update_tags(session, msg['record_id'], msg['tags'], True)
        case 'record_replace_tags':
            update_tags(session, msg['record_id'], msg['tags'], False)
        case 'notebook_create':
            notebook = create_notebook(session, msg['name'])
            session.commit()
            await reply(request, notebook.id)
        case 'notebook_extend':
            # Persist only the input cells not yet stored for this notebook;
            # reply with the id of the last created cell, or None if there
            # was nothing new to save.
            notebook = session.get(Notebook, msg['notebook_id'])
            inputCells = msg.get('input_cells', [""])
            try:
                aready_saved = len(notebook.cells)
            except:
                aready_saved = 0
            if len(inputCells) > aready_saved:
                for cell in inputCells[aready_saved:]:
                    cell = create_cell(session, notebook, cell)
                session.commit()
                await reply(request, cell.id)
            else:
                await reply(request, None)
        case 'notebook_history':
            cell = session.get(Cell, msg['cell_id'])
            if cell:
                await reply(request, [
                    cell.input.text
                    for cell in cell.notebook.cells[1:cell.index + 2]
                ])
            else:
                await reply(request, None)
        case 'config_get':
            config = get_config(session,
                                msg['config_id'],
                                base=datapath / 'objects')
            session.commit()
            await reply(request, config)
        case 'config_update':
            config = create_config(session,
                                   msg['update'],
                                   base=datapath / 'objects',
                                   filename='/'.join(
                                       random_path(datapath /
                                                   'objects').parts[-4:]))
            session.commit()
            await reply(request, config.id)
        case 'task_submit':
            from .scan import Scan

            # Reap finished tasks: on the first pass replace the entry with
            # the record id (so the client can still fetch it); on the
            # second pass drop it entirely.
            finished = [(id, queried) for id, (task, queried) in pool.items()
                        if not isinstance(task, int) and task.finished()]
            for id, queried in finished:
                if not queried:
                    # NOTE(review): pool[id] is a [task, queried] list (see
                    # below), so pool[id].record looks like it should be
                    # pool[id][0].record — confirm before relying on this.
                    pool[id] = [pool[id].record.id, False]
                else:
                    pool.pop(id)
            description = dill.loads(msg['description'])
            task = Scan()
            task.description = description
            task.start()
            pool[task.id] = [task, False]
            await reply(request, task.id)
        case 'task_get_record_id':
            task, queried = pool.get(msg['id'])
            if isinstance(task, int):
                # Task already reaped: the stored int IS the record id.
                await reply(request, task)
                pool.pop(msg['id'])
            else:
                # Poll up to ~10 s for the running task to acquire a record.
                for _ in range(10):
                    if task.record:
                        await reply(request, task.record.id)
                        pool[msg['id']] = [task, True]
                        break
                    await asyncio.sleep(1)
                else:
                    await reply(request, None)
        case 'task_get_progress':
            task, _ = pool.get(msg['id'])
            if isinstance(task, int):
                # Finished tasks report full progress as the scalar 1.
                await reply(request, 1)
            else:
                await reply(request,
                            [(bar.n, bar.total) for bar in task._bar.values()])
        case _:
            logger.error(f"Unknown method: {msg['method']}")

    if request.method not in ['ping']:
        logger.debug(f"finished handle: {request.method}")
355
+
356
+
357
async def handle_with_timeout(session: Session, request: Request,
                              datapath: Path, timeout: float):
    """Run :func:`handle` under a deadline, reporting failures to the client.

    A timeout sends the string ``'timeout'`` back; any other exception is
    logged and wrapped in an :class:`ErrorResponse`.
    """
    pending = handle(session, request, datapath)
    try:
        await asyncio.wait_for(pending, timeout=timeout)
    except asyncio.TimeoutError:
        logger.warning(
            f"Task handling request {request} timed out and was cancelled.")
        await reply(request, 'timeout')
    except Exception as e:
        logger.error(f"Task handling request {request} failed: {e!r}")
        await reply(request, ErrorResponse(f'{e!r}'))
    logger.debug(f"Task handling request {request} finished.")
370
+
371
+
372
async def serv(port,
               datapath,
               url='',
               buffer_size=1024 * 1024 * 1024,
               interval=60):
    """Accept-and-dispatch loop of the record server.

    Binds a ZMQ ROUTER socket on *port*, opens (and creates, if needed) the
    database at *url* (defaulting to a SQLite file under *datapath*), then
    serves requests forever.  Each request is handled in a background task
    with a one-hour timeout; the record cache is flushed whenever more than
    *buffer_size* bytes have been received or *interval* seconds have
    passed since the last flush.
    """
    logger.debug('Creating socket...')
    async with ZMQContextManager(zmq.ROUTER, bind=f"tcp://*:{port}") as sock:
        logger.info(f'Server started at port {port}.')
        logger.info(f'Data path: {datapath}.')
        if not url or url == 'sqlite':
            url = 'sqlite:///' + str(datapath / 'data.db')
        engine = create_engine(url)
        create_tables(engine)
        Session = sessionmaker(engine)
        # One long-lived session is shared by all handler tasks.
        with Session() as session:
            logger.info(f'Database connected: {url}.')
            received = 0
            last_flush_time = time.time()
            while True:
                logger.debug('Waiting for request...')
                identity, msg = await sock.recv_multipart()
                logger.debug('Received request.')
                received += len(msg)
                try:
                    req = Request(sock, identity, msg)
                except Exception as e:
                    # Unpicklable/garbage payload: answer with an error
                    # instead of crashing the accept loop.
                    logger.exception('bad request')
                    await sock.send_multipart(
                        [identity,
                         pickle.dumps(ErrorResponse(f'{e!r}'))])
                    continue
                # NOTE(review): the created task is not retained anywhere;
                # asyncio only keeps weak references to running tasks.
                asyncio.create_task(
                    handle_with_timeout(session, req, datapath,
                                        timeout=3600.0))
                # Periodic flush: by traffic volume or by elapsed time.
                if received > buffer_size or time.time(
                ) - last_flush_time > interval:
                    flush_cache()
                    received = 0
                    last_flush_time = time.time()
411
+
412
+
413
async def main(port,
               datapath,
               url,
               timeout=1,
               buffer=1024,
               interval=60,
               log='stderr',
               no_watch=True,
               debug=False):
    """Run the record server directly, or babysit it as a watchdog.

    With ``no_watch=True`` this configures loguru logging and runs
    :func:`serv` in the current process.  Otherwise it loops forever,
    pinging ``tcp://127.0.0.1:{port}``; whenever the ping fails or times
    out it kills any previously spawned server subprocess and launches a
    fresh ``python -m qulab server ... --no-watch`` child.
    """
    if no_watch:
        # --- direct-serve branch -------------------------------------
        logger.remove()
        if debug:
            level = 'DEBUG'
        else:
            level = 'INFO'
        if log == 'stderr':
            logger.add(sys.stderr, level=level)
        elif log == 'stdout':
            logger.add(sys.stdout, level=level)
        else:
            # A file path: keep stderr logging and also log to the file.
            logger.add(sys.stderr, level=level)
            logger.add(log, level=level)
        logger.debug(f"logging level: {level}")
        logger.info('Server starting...')
        await serv(port, datapath, url, buffer * 1024 * 1024, interval)
    else:
        # --- watchdog branch -----------------------------------------
        process = None

        while True:
            try:
                with ZMQContextManager(
                        zmq.DEALER, connect=f"tcp://127.0.0.1:{port}") as sock:
                    # LINGER=0: drop the ping immediately on close rather
                    # than blocking if the server is gone.
                    sock.setsockopt(zmq.LINGER, 0)
                    sock.send_pyobj({"method": "ping"})
                    logger.debug('ping.')
                    if sock.poll(int(1000 * timeout)):
                        sock.recv()
                        logger.debug('recv pong.')
                    else:
                        logger.debug('timeout.')
                        raise asyncio.TimeoutError()
            except (zmq.error.ZMQError, asyncio.TimeoutError):
                # Server unreachable: kill the old child (if any) and
                # respawn it.
                if process is not None:
                    logger.debug(
                        f'killing process... PID={process.pid}, returncode={process.returncode}'
                    )
                    process.kill()
                    logger.debug(
                        f'killed process. PID={process.pid}, returncode={process.returncode}'
                    )
                cmd = [
                    sys.executable,
                    "-m",
                    "qulab",
                    "server",
                    "--port",
                    f"{port}",
                    "--datapath",
                    f"{datapath}",
                    "--url",
                    f"{url}",
                    "--timeout",
                    f"{timeout}",
                    "--buffer",
                    f"{buffer}",
                    "--interval",
                    f"{interval}",
                    "--log",
                    f"{log}",
                ]
                if url:
                    # NOTE(review): '--url' is already in the base cmd above,
                    # so a non-empty url is passed twice (click keeps the
                    # last occurrence) — looks redundant, confirm and drop
                    # one of the two.
                    cmd.extend(['--url', url])
                if debug:
                    cmd.append('--debug')
                # The child must serve directly, not watch, or it would
                # recurse into spawning more watchers.
                cmd.append("--no-watch")
                logger.debug(f"starting process: {' '.join(cmd)}")
                process = subprocess.Popen(cmd, cwd=os.getcwd())
                logger.debug(
                    f'process started. PID={process.pid}, returncode={process.returncode}'
                )

                # Capture and log the output
                # stdout, stderr = process.communicate(timeout=5)
                # if stdout:
                #     logger.info(f'Server stdout: {stdout.decode()}')
                # if stderr:
                #     logger.error(f'Server stderr: {stderr.decode()}')

                # Give the freshly spawned server time to bind its socket.
                await asyncio.sleep(5)
            # Pace the ping loop.
            await asyncio.sleep(timeout)
503
+
504
+
505
@click.command()
@click.option('--port',
              default=os.getenv('QULAB_RECORD_PORT', 6789),
              help='Port of the server.')
@click.option('--datapath', default=datapath, help='Path of the data.')
@click.option('--url', default='sqlite', help='URL of the database.')
@click.option('--timeout', default=1, help='Timeout of ping.')
@click.option('--buffer', default=1024, help='Buffer size (MB).')
@click.option('--interval',
              default=60,
              help='Interval of flush cache, in unit of second.')
@click.option('--log', default='stderr', help='Log file.')
@click.option('--no-watch',
              is_flag=True,
              help='Serve directly instead of running the watchdog.')
@click.option('--debug', is_flag=True, help='Debug mode.')
def server(port, datapath, url, timeout, buffer, interval, log, no_watch,
           debug):
    """Start the record server, on uvloop when it is available."""
    # Pick the event-loop runner first so `main` is created and executed
    # exactly once; the old form wrapped `uvloop.run(main(...))` in the
    # try, so an ImportError escaping `main` at runtime would re-run it
    # under asyncio.run.
    try:
        import uvloop
        run = uvloop.run
    except ImportError:
        run = asyncio.run
    # Bug fix: `True` used to be hard-coded here, so the --no-watch flag
    # was ignored and the watchdog branch of `main` was unreachable.
    run(
        main(port, Path(datapath), url, timeout, buffer, interval, log,
             no_watch, debug))


if __name__ == "__main__":
    server()