QuLab 2.10.10__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. qulab/__init__.py +33 -0
  2. qulab/__main__.py +4 -0
  3. qulab/cli/__init__.py +0 -0
  4. qulab/cli/commands.py +30 -0
  5. qulab/cli/config.py +170 -0
  6. qulab/cli/decorators.py +28 -0
  7. qulab/dicttree.py +523 -0
  8. qulab/executor/__init__.py +5 -0
  9. qulab/executor/analyze.py +188 -0
  10. qulab/executor/cli.py +434 -0
  11. qulab/executor/load.py +563 -0
  12. qulab/executor/registry.py +185 -0
  13. qulab/executor/schedule.py +543 -0
  14. qulab/executor/storage.py +615 -0
  15. qulab/executor/template.py +259 -0
  16. qulab/executor/utils.py +194 -0
  17. qulab/expression.py +827 -0
  18. qulab/fun.cp313-win_amd64.pyd +0 -0
  19. qulab/monitor/__init__.py +1 -0
  20. qulab/monitor/__main__.py +8 -0
  21. qulab/monitor/config.py +41 -0
  22. qulab/monitor/dataset.py +77 -0
  23. qulab/monitor/event_queue.py +54 -0
  24. qulab/monitor/mainwindow.py +234 -0
  25. qulab/monitor/monitor.py +115 -0
  26. qulab/monitor/ploter.py +123 -0
  27. qulab/monitor/qt_compat.py +16 -0
  28. qulab/monitor/toolbar.py +265 -0
  29. qulab/scan/__init__.py +2 -0
  30. qulab/scan/curd.py +221 -0
  31. qulab/scan/models.py +554 -0
  32. qulab/scan/optimize.py +76 -0
  33. qulab/scan/query.py +387 -0
  34. qulab/scan/record.py +603 -0
  35. qulab/scan/scan.py +1166 -0
  36. qulab/scan/server.py +450 -0
  37. qulab/scan/space.py +213 -0
  38. qulab/scan/utils.py +234 -0
  39. qulab/storage/__init__.py +0 -0
  40. qulab/storage/__main__.py +51 -0
  41. qulab/storage/backend/__init__.py +0 -0
  42. qulab/storage/backend/redis.py +204 -0
  43. qulab/storage/base_dataset.py +352 -0
  44. qulab/storage/chunk.py +60 -0
  45. qulab/storage/dataset.py +127 -0
  46. qulab/storage/file.py +273 -0
  47. qulab/storage/models/__init__.py +22 -0
  48. qulab/storage/models/base.py +4 -0
  49. qulab/storage/models/config.py +28 -0
  50. qulab/storage/models/file.py +89 -0
  51. qulab/storage/models/ipy.py +58 -0
  52. qulab/storage/models/models.py +88 -0
  53. qulab/storage/models/record.py +161 -0
  54. qulab/storage/models/report.py +22 -0
  55. qulab/storage/models/tag.py +93 -0
  56. qulab/storage/storage.py +95 -0
  57. qulab/sys/__init__.py +2 -0
  58. qulab/sys/chat.py +688 -0
  59. qulab/sys/device/__init__.py +3 -0
  60. qulab/sys/device/basedevice.py +255 -0
  61. qulab/sys/device/loader.py +86 -0
  62. qulab/sys/device/utils.py +79 -0
  63. qulab/sys/drivers/FakeInstrument.py +68 -0
  64. qulab/sys/drivers/__init__.py +0 -0
  65. qulab/sys/ipy_events.py +125 -0
  66. qulab/sys/net/__init__.py +0 -0
  67. qulab/sys/net/bencoder.py +205 -0
  68. qulab/sys/net/cli.py +169 -0
  69. qulab/sys/net/dhcp.py +543 -0
  70. qulab/sys/net/dhcpd.py +176 -0
  71. qulab/sys/net/kad.py +1142 -0
  72. qulab/sys/net/kcp.py +192 -0
  73. qulab/sys/net/nginx.py +194 -0
  74. qulab/sys/progress.py +190 -0
  75. qulab/sys/rpc/__init__.py +0 -0
  76. qulab/sys/rpc/client.py +0 -0
  77. qulab/sys/rpc/exceptions.py +96 -0
  78. qulab/sys/rpc/msgpack.py +1052 -0
  79. qulab/sys/rpc/msgpack.pyi +41 -0
  80. qulab/sys/rpc/router.py +35 -0
  81. qulab/sys/rpc/rpc.py +412 -0
  82. qulab/sys/rpc/serialize.py +139 -0
  83. qulab/sys/rpc/server.py +29 -0
  84. qulab/sys/rpc/socket.py +29 -0
  85. qulab/sys/rpc/utils.py +25 -0
  86. qulab/sys/rpc/worker.py +0 -0
  87. qulab/sys/rpc/zmq_socket.py +227 -0
  88. qulab/tools/__init__.py +0 -0
  89. qulab/tools/connection_helper.py +39 -0
  90. qulab/typing.py +2 -0
  91. qulab/utils.py +95 -0
  92. qulab/version.py +1 -0
  93. qulab/visualization/__init__.py +188 -0
  94. qulab/visualization/__main__.py +71 -0
  95. qulab/visualization/_autoplot.py +464 -0
  96. qulab/visualization/plot_circ.py +319 -0
  97. qulab/visualization/plot_layout.py +408 -0
  98. qulab/visualization/plot_seq.py +242 -0
  99. qulab/visualization/qdat.py +152 -0
  100. qulab/visualization/rot3d.py +23 -0
  101. qulab/visualization/widgets.py +86 -0
  102. qulab-2.10.10.dist-info/METADATA +110 -0
  103. qulab-2.10.10.dist-info/RECORD +107 -0
  104. qulab-2.10.10.dist-info/WHEEL +5 -0
  105. qulab-2.10.10.dist-info/entry_points.txt +2 -0
  106. qulab-2.10.10.dist-info/licenses/LICENSE +21 -0
  107. qulab-2.10.10.dist-info/top_level.txt +1 -0
@@ -0,0 +1,352 @@
1
+ import bisect
2
+ from concurrent.futures import Future
3
+ from datetime import datetime
4
+ from itertools import chain
5
+ from multiprocessing import Lock
6
+ from queue import Queue
7
+ from typing import Any, Sequence
8
+
9
+ from ..scan.base import StepStatus, Tracker, _get_all_dependence
10
+
11
# Sentinel default: lets `BaseDataset.get` distinguish "no default supplied"
# (raise KeyError) from a caller explicitly passing `None` as the default.
_NODEFAULT = object()
12
+
13
+
14
class BaseDataset(Tracker):
    """
    A tracker that stores the results of the steps.

    Results are handed over via :meth:`feed`, buffered on an internal queue
    (so that still-pending ``Future`` results never block the producer) and
    merged into ``self.data`` lazily by :meth:`flush`.

    Parameters
    ----------
    data : dict
        The data of the results.
    shape : tuple
        The shape of the results.
    save_kwds : bool | Sequence[str]
        ``True`` saves every step keyword, a sequence saves only the named
        keywords (missing ones become NaN), ``False`` saves none.
    frozen_keys : tuple
        Keys fixed at init time; fed values for them are ignored.
    ignores : tuple
        Keys dropped from every fed dataframe.

    Attributes
    ----------
    ctime : datetime.datetime
        The creation time of the tracker.
    mtime : datetime.datetime
        The modification time of the tracker.
    """

    def __init__(
        self,
        data: dict | None = None,
        shape: tuple = (),
        save_kwds: bool | Sequence[str] = True,
        frozen_keys: tuple = (),
        ignores: tuple = (),
    ):
        # NOTE(review): datetime.utcnow() is deprecated since Python 3.12.
        # Kept as-is because .timestamp() on these naive values is used for
        # the stored timestamps and switching to aware UTC would change them.
        self.ctime = datetime.utcnow()
        self.mtime = datetime.utcnow()
        self.data = data if data is not None else {}
        self.cache = {}  # key -> (array, shape, count), built by _get_array
        self.pos = {}  # key -> tuple of per-dimension index lists
        self.timestamps = {}  # key -> list of feed times (POSIX seconds)
        self.iteration = {}  # key -> list of iteration numbers
        self._init_keys = list(self.data.keys())
        self._frozen_keys = frozen_keys
        self._ignores = ignores
        self._key_levels = ()
        self.depends = {}  # dependence graph, filled by init()
        self.dims = {}  # loop level -> keys varying on that level
        self.vars_dims = {}  # key -> tuple of loop levels it depends on
        self.shape = shape
        self.count = 0  # number of appended values; invalidates the cache
        self.save_kwds = save_kwds
        self.queue = Queue()
        self._queue_buffer = None  # holds one not-yet-done Future entry
        self._lock = Lock()

    def init(self, loops: dict, functions: dict, constants: dict, graph: dict,
             order: list):
        """
        Initialize the tracker.

        Parameters
        ----------
        loops : dict
            The map of iterables.
        functions : dict
            The map of functions.
        constants : dict
            The map of constants.
        graph : dict
            The dependence graph.
        order : list
            The order of the dependence graph.
        """
        from numpy import ndarray

        self.depends = graph

        for level, (keys, iters) in enumerate(loops.items()):
            self._key_levels = self._key_levels + ((keys, level), )
            if isinstance(keys, str):
                keys = (keys, )
                iters = (iters, )
            # Special case: several keys paired with a single 2-D array means
            # each column of the array holds one key's values.
            if (len(keys) > 1 and len(iters) == 1
                    and isinstance(iters[0], ndarray) and iters[0].ndim == 2
                    and iters[0].shape[1] == len(keys)):
                iters = iters[0]
                for i, key in enumerate(keys):
                    self.data[key] = iters[:, i]
                    self._frozen_keys = self._frozen_keys + (key, )
                    self._init_keys.append(key)
                continue
            if not isinstance(iters, tuple) or len(keys) != len(iters):
                continue
            for key, iter in zip(keys, iters):
                if self.depends.get(key, set()):
                    # A key with dependencies varies on every level its
                    # dependencies vary on, plus its own level.
                    dims = set()
                    for dep in self.depends[key]:
                        if dep in self.vars_dims:
                            dims.update(set(self.vars_dims[dep]))
                    dims.add(level)
                    self.vars_dims[key] = tuple(sorted(dims))
                else:
                    self.vars_dims[key] = (level, )
                if level not in self.dims:
                    self.dims[level] = ()
                self.dims[level] = self.dims[level] + (key, )
                # Concrete, finite iterables can be stored up front and
                # frozen; dunder-prefixed keys stay private to the scan.
                if key not in self.data and isinstance(iter,
                                                       (list, range, ndarray)):
                    if key.startswith('__'):
                        continue
                    self.data[key] = iter
                    self._frozen_keys = self._frozen_keys + (key, )
                    self._init_keys.append(key)

        for key, value in constants.items():
            if key.startswith('__'):
                continue
            self.data[key] = value
            self._init_keys.append(key)
            self.vars_dims[key] = ()

        # Derived (function) keys vary on the union of the levels of all
        # their transitive dependencies.
        for ready in order:
            for key in ready:
                if key in functions:
                    deps = _get_all_dependence(key, graph)
                    dims = set()
                    for k in deps:
                        dims.update(set(self.vars_dims.get(k, ())))
                    self.vars_dims[key] = tuple(sorted(dims))

        # Register every single-level key on its level's dims entry.
        for k, v in self.vars_dims.items():
            if len(v) == 1:
                if v[0] in self.dims and k not in self.dims[v[0]]:
                    self.dims[v[0]] = self.dims[v[0]] + (k, )
                elif v[0] not in self.dims:
                    self.dims[v[0]] = (k, )

    def feed(self,
             step: StepStatus,
             dataframe: dict | Future,
             store=False,
             **options):
        """
        Feed the results of the step to the dataset.

        The entry is queued rather than merged immediately; a ``Future``
        (or a dict containing Futures) is only resolved once it is done.

        Parameters
        ----------
        step : StepStatus
            The step.
        dataframe : dict | Future
            The results of the step.
        store : bool
            If False, the entry is discarded (nothing is recorded).
        """
        import numpy as np

        if not store:
            return
        self.mtime = datetime.utcnow()
        # Grow the recorded shape to cover the step's position.
        if not self.shape:
            self.shape = tuple([i + 1 for i in step.pos])
        else:
            self.shape = tuple(
                [max(i + 1, j) for i, j in zip(step.pos, self.shape)])
        if self.save_kwds:
            if isinstance(self.save_kwds, bool):
                kwds = step.kwds
            else:
                # Only the requested keywords; absent ones become NaN so the
                # column lengths stay consistent.
                kwds = {
                    key: step.kwds.get(key, np.nan)
                    for key in self.save_kwds
                }
        else:
            kwds = {}

        if isinstance(dataframe, dict):
            dataframe = self._prune(dataframe)
        self.queue.put_nowait(
            (step.iteration, step.pos, dataframe, kwds, self.mtime))
        self.flush()

    def _prune(self, dataframe: dict[str, Any]) -> dict[str, Any]:
        """Drop ignored, frozen and dunder-prefixed keys from a dataframe."""
        return {
            k: v
            for k, v in dataframe.items() if k not in self._ignores
            and k not in self._frozen_keys and not k.startswith('__')
        }

    def _append(self, iteration, pos, dataframe, kwds, now):
        """Merge one resolved queue entry into the per-key column lists."""
        for k, v in chain(kwds.items(), dataframe.items()):
            if k in self._frozen_keys or k in self._ignores:
                continue
            if k.startswith('__'):
                continue
            # Constants (no loop dims) are only recorded when explicitly
            # present in the fed dataframe.
            if self.vars_dims.get(k, ()) == () and k not in dataframe:
                continue
            # NOTE: incremented even when the duplicate-position check below
            # skips the value; only used to invalidate _get_array's cache.
            self.count += 1
            if k not in self.data:
                # First value for this key: start the column and its
                # per-dimension position lists.
                self.data[k] = [v]
                if k in self.vars_dims:
                    self.pos[k] = tuple([pos[i]] for i in self.vars_dims[k])
                else:
                    self.pos[k] = tuple([i] for i in pos)
                self.timestamps[k] = [now.timestamp()]
                self.iteration[k] = [iteration]
            else:
                if k in self.vars_dims:
                    pos_k = tuple(pos[i] for i in self.vars_dims[k])
                    # Keyword-only keys are deduplicated per position.
                    if k not in dataframe and pos_k in zip(*self.pos[k]):
                        continue
                    for i, l in zip(pos_k, self.pos[k]):
                        l.append(i)
                else:
                    for i, l in zip(pos, self.pos[k]):
                        l.append(i)
                self.timestamps[k].append(now.timestamp())
                self.iteration[k].append(iteration)
                self.data[k].append(v)

    def flush(self, block=False):
        """Drain the queue into the columns (thread/process-lock guarded)."""
        with self._lock:
            self._flush(block=block)

    def _dataframe_done(self, dataframe: Future | dict) -> bool:
        """Return True when the entry (or all Futures inside it) is ready."""
        if isinstance(dataframe, Future):
            return dataframe.done()
        else:
            return all(x.done() for x in dataframe.values()
                       if isinstance(x, Future))

    def _dataframe_result(self, dataframe: Future | dict) -> dict:
        """Resolve the entry, waiting on any Futures it contains."""
        if isinstance(dataframe, Future):
            return dataframe.result()
        else:
            return {
                k: v.result() if isinstance(v, Future) else v
                for k, v in dataframe.items()
            }

    def _flush(self, block=False):
        # A previously deferred entry must be appended first to preserve
        # feed order; give up early if it is still pending and block=False.
        if self._queue_buffer is not None:
            iteration, pos, dataframe, kwds, now = self._queue_buffer
            if self._dataframe_done(dataframe) or block:
                self._append(iteration, pos,
                             self._prune(self._dataframe_result(dataframe)),
                             kwds, now)
                self._queue_buffer = None
            else:
                return
        while not self.queue.empty():
            iteration, pos, dataframe, kwds, now = self.queue.get()
            if not self._dataframe_done(dataframe) and not block:
                # Park the pending entry; later flushes retry it first.
                self._queue_buffer = (iteration, pos, dataframe, kwds, now)
                return
            else:
                self._append(iteration, pos,
                             self._prune(self._dataframe_result(dataframe)),
                             kwds, now)

    def _get_array(self, key, shape, count):
        """
        Materialize column *key* as a dense array of *shape*, with NaN
        (or None-like object cells) at positions never fed.
        """
        import numpy as np

        if key in self.vars_dims:
            shape = tuple([shape[i] for i in self.vars_dims[key]])

        data, data_shape, data_count = self.cache.get(key, (None, (), 0))
        if (data_shape, data_count) == (shape, count):
            return data
        try:
            tmp = np.asarray(self.data[key])
            if data_shape != shape:
                data = np.full(shape + tmp.shape[1:], np.nan, dtype=tmp.dtype)
        except (TypeError, ValueError):
            # Ragged or non-numeric values (np.asarray raises ValueError on
            # ragged input; np.full raises on NaN-to-int): fall back to an
            # object array.  Was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit.
            tmp = self.data[key]
            if data_shape != shape:
                data = np.full(shape, np.nan, dtype=object)
        # (The original wrapped this in a no-op ``try: ... except: raise``.)
        data[self.pos[key]] = tmp
        self.cache[key] = (data, shape, count)
        return data

    def _get_part(self, key, skip):
        """Return (data, iteration, pos) for entries with iteration >= skip."""
        i = bisect.bisect_left(self.iteration[key], skip)
        pos = tuple(p[i:] for p in self.pos[key])
        iteration = self.iteration[key][i:]
        data = self.data[key][i:]
        return data, iteration, pos

    def keys(self):
        """
        Get the keys of the dataset.
        """
        self.flush()
        return self.data.keys()

    def values(self):
        """
        Get the values of the dataset.
        """
        self.flush()
        return [self[k] for k in self.data]

    def items(self):
        """
        Get the items of the dataset.
        """
        self.flush()
        return list(zip(self.keys(), self.values()))

    def get(self, key, default=_NODEFAULT, skip=None, block=False):
        """
        Get the value of the dataset.

        Init-time keys are returned verbatim; fed keys are returned as a
        dense array (or, with *skip*, as the partial (data, iteration, pos)
        triple).  Raises KeyError for unknown keys unless *default* is given.
        """
        self.flush(block)
        if key in self._init_keys:
            return self.data[key]
        elif key in self.data:
            if skip is None:
                return self._get_array(key, self.shape, self.count)
            else:
                return self._get_part(key, skip)
        elif default is _NODEFAULT:
            raise KeyError(key)
        else:
            return default

    def __getitem__(self, key):
        return self.get(key)

    def __getstate__(self):
        # Queue and Lock are not picklable, so the state is the flushed,
        # materialized data plus the bookkeeping needed to rebuild views.
        self.flush()
        data = dict(self.items())
        return {
            'data': data,
            'pos': self.pos,
            'timestamps': self.timestamps,
            'iteration': self.iteration,
            'depends': self.depends,
            'shape': self.shape,
            'dims': self.dims,
            'vars_dims': self.vars_dims,
            'ctime': self.ctime,
            'mtime': self.mtime,
            '_init_keys': self._init_keys,
            '_frozen_keys': self._frozen_keys,
            '_ignores': self._ignores,
            '_key_levels': self._key_levels,
            'save_kwds': self.save_kwds,
        }
qulab/storage/chunk.py ADDED
@@ -0,0 +1,60 @@
1
+ import hashlib
2
+ import zlib
3
+ from pathlib import Path
4
+
5
# Root directory for chunk/pack storage; override with set_data_path().
DATAPATH = Path.home() / 'data'
# Presumably the split size for large blobs (not used in this file —
# confirm at callers).
CHUNKSIZE = 1024 * 1024 * 4  # 4 MB
7
+
8
+
9
def set_data_path(base_path: str) -> None:
    """
    Redirect all chunk/pack storage to *base_path*.

    Module-wide side effect: rebinds the ``DATAPATH`` global that
    ``get_data_path`` returns.
    """
    global DATAPATH
    DATAPATH = Path(base_path)
12
+
13
+
14
def get_data_path() -> Path:
    """Return the directory currently used as the storage root."""
    return DATAPATH
16
+
17
+
18
def save_chunk(data: bytes, compressed: bool = False) -> tuple[str, int]:
    """
    Write *data* as a content-addressed chunk file.

    The chunk lands at ``<data path>/chunks/ab/cd/<rest>`` where
    ``abcd<rest>`` is the SHA-1 hex digest of the (optionally compressed)
    payload, so identical payloads deduplicate to one file.

    Parameters
    ----------
    data : bytes
        Raw payload to store.
    compressed : bool
        If True, zlib-compress the payload before hashing and writing.

    Returns
    -------
    tuple[str, int]
        The relative path (``'chunks/ab/cd/<rest>'``) and the number of
        bytes written.  (Fixes the original annotation ``tuple[str, str]``
        — the second element has always been ``len(data)``, an int.)
    """
    if compressed:
        data = zlib.compress(data)
    hashstr = hashlib.sha1(data).hexdigest()
    file = get_data_path(
    ) / 'chunks' / hashstr[:2] / hashstr[2:4] / hashstr[4:]
    file.parent.mkdir(parents=True, exist_ok=True)
    with open(file, 'wb') as f:
        f.write(data)
    # parts[-4:] is exactly ('chunks', ab, cd, rest).
    return '/'.join(file.parts[-4:]), len(data)
28
+
29
+
30
def load_chunk(file: str, compressed: bool = False) -> bytes:
    """
    Read a stored chunk back.

    *file* is either ``'chunks/ab/cd/<rest>'`` (a standalone chunk file)
    or ``'packs/<name>/<start>/<size>'`` (a byte slice of a pack file).
    With ``compressed=True`` the bytes are zlib-decompressed before return.
    Raises ValueError for any other path form.
    """
    if file.startswith('chunks/'):
        data = (get_data_path() / file).read_bytes()
    elif file.startswith('packs/'):
        *path_parts, start, size = file.split('/')
        pack_path = get_data_path() / '/'.join(path_parts)
        with open(pack_path, 'rb') as f:
            f.seek(int(start))
            data = f.read(int(size))
    else:
        raise ValueError('Invalid file path: ' + file)
    return zlib.decompress(data) if compressed else data
45
+
46
+
47
def pack_chunk(pack: str, chunkfile: str) -> str:
    """
    Append the bytes of *chunkfile* to the pack file *pack*.

    Returns the slice address ``'packs/<name>/<start>/<size>'`` that
    ``load_chunk`` understands.
    """
    pack_path = get_data_path() / 'packs' / pack
    pack_path.parent.mkdir(parents=True, exist_ok=True)
    with open(pack_path, 'ab') as f:
        # Load after opening, matching the original side-effect order
        # (the pack file exists even if loading the chunk fails).
        payload = load_chunk(chunkfile)
        offset = f.tell()
        length = len(payload)
        f.write(payload)
    relative = '/'.join(pack_path.parts[-2:])
    return f"{relative}/{offset}/{length}"
56
+
57
+
58
def delete_chunk(file: str):
    """Remove the chunk file addressed by *file* (relative to the data path)."""
    target = get_data_path() / file
    target.unlink()
@@ -0,0 +1,127 @@
1
+ import bisect
2
+ import pathlib
3
+ from itertools import chain
4
+
5
+ import dill
6
+
7
+ from .base_dataset import BaseDataset
8
+ from .file import ArrayFile, BinaryFile, ObjectFile, ObjectListFile, load
9
+
10
+
11
class Dataset(BaseDataset):
    """
    File-backed variant of :class:`BaseDataset`.

    Each column lives in its own append-only ``ArrayFile`` under
    ``self._path``, and the whole index object is re-pickled with ``dill``
    after every append batch so a reader can rehydrate it.

    NOTE(review): ``self._path`` is read throughout but never assigned in
    this class — presumably the storage layer sets it after construction;
    confirm against the caller.
    """

    def __init__(self, uuid, storage):
        # Try to restore a previously saved dataset identified by `uuid`
        # from the storage backend; otherwise start empty.
        super().__init__()
        self.uuid = uuid
        self.storage = storage
        try:
            obj = self.storage.get(self.uuid)
            self.data = obj.data
            self.pos = obj.pos
            self.timestamps = obj.timestamps
            self.iteration = obj.iteration
            self.depends = obj.depends
            self.shape = obj.shape
            self.dims = obj.dims
            self.vars_dims = obj.vars_dims
            self.ctime = obj.ctime
            self.mtime = obj.mtime
            self._init_keys = obj._init_keys
            self._frozen_keys = obj._frozen_keys
            self._key_levels = obj._key_levels
            self.save_kwds = obj.save_kwds
        except:
            # NOTE(review): bare except treats *any* failure (missing uuid,
            # corrupt snapshot, ...) as "fresh dataset" — consider narrowing.
            pass

    def unlink(self):
        # Delete every backing file this dataset created, then attempt to
        # remove the directory entry itself.
        for file in pathlib.Path(self._path).iterdir():
            if file.name.startswith('storage_'):
                file.unlink()
            elif file.name.startswith('pos_'):
                file.unlink()
            elif file.name.startswith('timestamp_'):
                file.unlink()
            elif file.name.startswith('iteration_'):
                file.unlink()
            elif file.name == 'index':
                file.unlink()
        try:
            # NOTE(review): Path.unlink() does not remove directories, so if
            # _path is a directory this always raises (and is swallowed);
            # rmdir() may be what was intended — confirm.
            pathlib.Path(self._path).unlink()
        except:
            pass

    def _append(self, iteration, pos, dataframe, kwds, now):
        # Mirrors BaseDataset._append but writes through to file-backed
        # ArrayFile columns and persists the index afterwards.
        # NOTE(review): unlike the base class, keys in ``self._ignores`` are
        # NOT skipped here — confirm whether that is intentional.
        for k, v in chain(kwds.items(), dataframe.items()):
            if k in self._frozen_keys:
                continue
            if k.startswith('__'):
                continue
            # Constants (no loop dims) are only recorded when explicitly
            # present in the fed dataframe.
            if self.vars_dims.get(k, ()) == () and k not in dataframe:
                continue
            self.count += 1
            if k not in self.data:
                # First value for this key: create the backing files for the
                # column, its per-dimension positions, timestamps, iterations.
                self.data[k] = ArrayFile(self._path / f'storage_{k}', v)
                if k in self.vars_dims:
                    self.pos[k] = tuple(
                        ArrayFile(self._path / f'pos_{k}_{j}', pos[i])
                        for j, i in enumerate(self.vars_dims[k]))
                else:
                    self.pos[k] = tuple(
                        ArrayFile(self._path / f'pos_{k}_{j}', i)
                        for j, i in enumerate(pos))
                self.timestamps[k] = ArrayFile(self._path / f'timestamp_{k}',
                                               now.timestamp())
                self.iteration[k] = ArrayFile(self._path / f'iteration_{k}',
                                              iteration)
            else:
                if k in self.vars_dims:
                    pos_k = tuple(pos[i] for i in self.vars_dims[k])
                    # Keyword-only keys are deduplicated per position
                    # (requires reading the position files back).
                    if k not in dataframe and pos_k in zip(
                            *[list(l) for l in self.pos[k]]):
                        continue
                    for i, l in zip(pos_k, self.pos[k]):
                        l.append(i)
                else:
                    for i, l in zip(pos, self.pos[k]):
                        l.append(i)
                self.timestamps[k].append(now.timestamp())
                self.iteration[k].append(iteration)
                self.data[k].append(v)
        # Persist the full index after each batch so readers can rehydrate.
        with open(self._path / 'index', 'wb') as f:
            dill.dump(self, f)

    def _get_array(self, key, shape, count):
        # Materialize column `key` as a dense array of `shape`, with NaN
        # (or object cells) at positions that were never fed.
        import numpy as np

        if key in self.vars_dims:
            shape = tuple([shape[i] for i in self.vars_dims[key]])

        # (array, shape, count) caches the last materialization.
        data, data_shape, data_count = self.cache.get(key, (None, (), 0))
        if (data_shape, data_count) == (shape, count):
            return data
        try:
            tmp = np.asarray(list(self.data[key]))
            if data_shape != shape:
                data = np.full(shape + tmp.shape[1:], np.nan, dtype=tmp.dtype)
        except:
            # Ragged/non-numeric values: fall back to an object array.
            # NOTE(review): bare except also catches KeyboardInterrupt.
            tmp = list(self.data[key])
            if data_shape != shape:
                data = np.full(shape, np.nan, dtype=object)
        try:
            pos = tuple([list(l) for l in self.pos[key]])
            data[pos] = tmp
        except:
            # NOTE(review): leftover debug prints before the re-raise —
            # consider logging instead.
            print(key)
            print(data)
            print(pos)
            print(tmp)
            raise
        self.cache[key] = (data, shape, count)
        return data

    def _get_part(self, key, skip):
        # Return (data, iteration, pos) for entries with iteration >= skip;
        # ArrayFile columns are read into plain lists before slicing.
        i = bisect.bisect_left(list(self.iteration[key]), skip)
        pos = tuple(list(p)[i:] for p in self.pos[key])
        iteration = list(self.iteration[key])[i:]
        data = list(self.data[key])[i:]
        return data, iteration, pos