returnn 1.0.0__py3-none-any.whl → 1.20250204.4017__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of returnn might be problematic.
- returnn/PKG-INFO +1 -1
- returnn/_setup_info_generated.py +2 -2
- returnn/datasets/hdf.py +14 -2
- returnn/datasets/normalization_data.py +4 -1
- returnn/datasets/raw_wav.py +2 -1
- returnn/datasets/stereo.py +2 -1
- returnn/util/basic.py +8 -1
- returnn/util/debug_helpers.py +2 -1
- {returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/METADATA +1 -1
- {returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/RECORD +13 -13
- {returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/LICENSE +0 -0
- {returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/WHEEL +0 -0
- {returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/top_level.txt +0 -0
returnn/PKG-INFO
CHANGED
returnn/_setup_info_generated.py
CHANGED
@@ -1,2 +1,2 @@
-version = '1.
-long_version = '1.
+version = '1.20250204.004017'
+long_version = '1.20250204.004017+git.43f54b8'
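The new strings follow RETURNN's date-and-time-based scheme, 1.&lt;YYYYMMDD&gt;.&lt;HHMMSS&gt;, with long_version additionally carrying the abbreviated git revision. PEP 440 normalization strips leading zeros from numeric release segments, which is why '004017' in the file shows up as 4017 in the wheel name. A minimal sketch of how such strings can be built (illustrative only; RETURNN's actual setup code presumably derives the timestamp from the git commit rather than the current time):

import subprocess
import time


def make_version_strings():
    """Build "1.<YYYYMMDD>.<HHMMSS>" plus a long form carrying the git commit."""
    t = time.gmtime()
    version = "1.%04d%02d%02d.%02d%02d%02d" % (t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec)
    # Abbreviated commit hash, e.g. "43f54b8" -> "1.20250204.004017+git.43f54b8".
    commit = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"], text=True).strip()
    return version, "%s+git.%s" % (version, commit)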
returnn/datasets/hdf.py
CHANGED
@@ -3,18 +3,20 @@ Provides :class:`HDFDataset`.
 """
 
 from __future__ import annotations
-from typing import Union
+from typing import TYPE_CHECKING, Union
 import typing
 import bisect
 import collections
 import gc
-import h5py
 import numpy
 from .cached import CachedDataset
 from .cached2 import CachedDataset2
 from .basic import Dataset, DatasetSeq
 from returnn.log import log
 
+if TYPE_CHECKING:
+    import h5py
+
 
 # Common attribute names for HDF dataset, which should be used in order to be proceed with HDFDataset class.
 attr_seqLengths = "seqLengths"
@@ -82,6 +84,8 @@ class HDFDataset(CachedDataset):
         Use load_seqs() to load the actual data.
         :type filename: str
         """
+        import h5py
+
         if self._use_cache_manager:
             from returnn.util.basic import cf
 
@@ -660,6 +664,8 @@ class NextGenHDFDataset(CachedDataset2):
        """
        :param str path:
        """
+        import h5py
+
        self.files.append(path)
        self.h5_files.append(h5py.File(path))
 
@@ -852,6 +858,8 @@ class SiameseHDFDataset(CachedDataset2):
 
        :param str path: path to single .hdf file
        """
+        import h5py
+
        self.files.append(path)
        self.h5_files.append(h5py.File(path, "r"))
        cur_file = self.h5_files[-1]
@@ -1092,6 +1100,7 @@ class SimpleHDFWriter:
         import tempfile
         import os
         import shutil
+        import h5py
 
         if ndim is None:
             if dim is None:
@@ -1170,6 +1179,7 @@ class SimpleHDFWriter:
         :rtype: bool
         """
         from returnn.util.basic import hdf5_strings
+        import h5py
 
         added_count = 0
         for data_key, (dim, ndim, dtype) in extra_type.items():
@@ -1405,6 +1415,8 @@ class HDFDatasetWriter:
        """
        :param str filename: for the HDF to write
        """
+        import h5py
+
        print("Creating HDF dataset file %s" % filename, file=log.v3)
        self.filename = filename
        self.file = h5py.File(filename, "w")
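Every hunk in this file follows the same pattern, and it repeats across the files below: the module-level import h5py is removed, a TYPE_CHECKING-guarded import keeps the name available for type annotations, and each function that actually touches HDF data imports h5py locally on first use. A minimal sketch of the pattern (the function below is illustrative, not taken from RETURNN):

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by static type checkers; creates no runtime dependency on h5py.
    import h5py


def open_readonly(filename: str) -> h5py.File:
    # Deferred import: h5py is loaded on the first call, not at module import time.
    import h5py

    return h5py.File(filename, "r")

With from __future__ import annotations in effect, the h5py.File annotation is never evaluated at runtime, so the module imports cleanly even when h5py is not installed.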
returnn/datasets/normalization_data.py
CHANGED
@@ -1,5 +1,4 @@
 import os
-import h5py
 import numpy as np
 
 from .bundle_file import BundleFile
@@ -90,6 +89,8 @@ class NormalizationData:
        :type dtype: numpy.dtype
        :param dtype: type of data to use during calculations.
        """
+        import h5py
+
        accumulatedSum = None
        accumulatedSumOfSqr = None
        totalFrames = long()
@@ -285,6 +286,8 @@ class NormalizationData:
        e.g. total number of time frames, mean of squares etc.
        However, this information is not read here.
        """
+        import h5py
+
        if not os.path.isfile(self._normalizationFilePath):
            raise IOError(self._normalizationFilePath + " does not exist")
        with h5py.File(self._normalizationFilePath, mode="r") as f:
returnn/datasets/raw_wav.py
CHANGED
@@ -4,7 +4,6 @@ Provide :class:`RawWavDataset`.
 
 from __future__ import annotations
 
-import h5py
 from .cached2 import CachedDataset2
 from returnn.datasets.basic import DatasetSeq
 from returnn.log import log
@@ -180,6 +179,8 @@ class RawWavDataset(CachedDataset2):
        :rtype: (h5py._hl.file.File, string)
        :return: (hdf buffer file handler, path to tmp file)
        """
+        import h5py
+
        f_id, tmp_hdf_file_path = tempfile.mkstemp(suffix=".hdf")
        file_handler = h5py.File(tmp_hdf_file_path, "w")
        file_handler.create_group("timeSignal")
returnn/datasets/stereo.py
CHANGED
@@ -10,7 +10,6 @@ __author__ = "menne"
 
 import os
 import numpy as np
-import h5py
 from collections import deque
 from .cached2 import CachedDataset2
 from returnn.datasets.basic import DatasetSeq
@@ -181,6 +180,8 @@ class StereoHdfDataset(StereoDataset):
            which should contain one path to an HDF file per line
        :see: BundleFile.BundleFile
        """
+        import h5py
+
        self._filePaths = []
        self._fileHandlers = []
        if hdfFile.endswith(".bundle"):  # a bundle file containing a list of hdf files is given
returnn/util/basic.py
CHANGED
@@ -10,7 +10,6 @@ from typing import Optional, Union, Any, Generic, TypeVar, Iterable, Tuple, Dict
 import subprocess
 from subprocess import CalledProcessError
 
-import h5py
 from collections import deque
 import inspect
 import os
@@ -718,6 +717,8 @@ def hdf5_dimension(filename, dimension):
    :param str dimension:
    :rtype: numpy.ndarray|int
    """
+    import h5py
+
    fin = h5py.File(filename, "r")
    if "/" in dimension:
        res = fin["/".join(dimension.split("/")[:-1])].attrs[dimension.split("/")[-1]]
@@ -733,6 +734,8 @@ def hdf5_group(filename, dimension):
    :param str dimension:
    :rtype: dict[str]
    """
+    import h5py
+
    fin = h5py.File(filename, "r")
    res = {k: fin[dimension].attrs[k] for k in fin[dimension].attrs}
    fin.close()
@@ -745,6 +748,8 @@ def hdf5_shape(filename, dimension):
    :param dimension:
    :rtype: tuple[int]
    """
+    import h5py
+
    fin = h5py.File(filename, "r")
    res = fin[dimension].shape
    fin.close()
@@ -757,6 +762,8 @@ def hdf5_strings(handle, name, data):
    :param str name:
    :param numpy.ndarray|list[str] data:
    """
+    import h5py
+
    # noinspection PyBroadException
    try:
        s = max([len(d) for d in data])
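A side effect of these changes is that importing returnn.util.basic no longer drags h5py into the process; only calling one of the hdf5_* helpers does. A quick way to check (a sketch, assuming returnn and h5py are installed and nothing else in the import chain pulls h5py in):

import sys

import returnn.util.basic  # noqa: F401

# h5py is no longer imported as a side effect of the module import...
assert "h5py" not in sys.modules

# ...it only appears once an hdf5_* helper actually runs, e.g.:
# returnn.util.basic.hdf5_dimension("features.hdf", "inputPattSize")  # hypothetical file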
returnn/util/debug_helpers.py
CHANGED
@@ -4,7 +4,6 @@ This file is going to be imported by Debug.debug_shell() and available as intera
 
 import sys
 import numpy
-import h5py
 
 
 def find_obj_in_stack(cls, stack=None, all_threads=True):
@@ -58,6 +57,8 @@ class SimpleHdf:
    """
 
    def __init__(self, filename):
+        import h5py
+
        self.hdf = h5py.File(filename)
        self.seq_tag_to_idx = {name: i for (i, name) in enumerate(self.hdf["seqTags"])}
        self.num_seqs = len(self.hdf["seqTags"])
{returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/RECORD
CHANGED
@@ -1,9 +1,9 @@
-returnn/PKG-INFO,sha256=
+returnn/PKG-INFO,sha256=6uFK26GLuffKCi8KkSeMajnlYfmBu2P7QvW9ZahkRUQ,5213
 returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
 returnn/__main__.py,sha256=qBFbuB1yN3adgVM5pXt2-Yq9vorjRNchNPL8kDKx44M,31752
 returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
 returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
-returnn/_setup_info_generated.py,sha256=
+returnn/_setup_info_generated.py,sha256=ufLm-mHrf71VDPXycVqyXd6vqpYzddFlQp1A-uq2RQI,77
 returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
 returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
 returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -19,17 +19,17 @@ returnn/datasets/cached.py,sha256=DIRdWrxBmsZG8O_9eVxBO5mcdo4f5KU-Xb-4wVz59Io,25
 returnn/datasets/cached2.py,sha256=STojLL2Ivvd0xMfZRlYgzsHKlikYKL-caZCIDCgc_9g,11773
 returnn/datasets/distrib_files.py,sha256=kyqIQILDPAO2TXr39hjslmDxIAc3pkY1UOoj8nuiFXo,27534
 returnn/datasets/generating.py,sha256=e2-SXcax7xQ4fkVW_Q5MgOLP6KlB7EQXJi_v64gVAWI,99805
-returnn/datasets/hdf.py,sha256=
+returnn/datasets/hdf.py,sha256=shif0aQqWWNJ0b6YnycpPjIVNsxjLrA41Y66-_SluGI,66993
 returnn/datasets/lm.py,sha256=dP5VtKiIWyy9vNhlT7FddQvcwjA6CpbhYb6IGLGGtZc,96027
 returnn/datasets/map.py,sha256=kOBJVZmwDhLsOplzDNByIfa0NRSUaMo2Lsy36lBvxrM,10907
 returnn/datasets/meta.py,sha256=wHquywF1C7-YWhcSFSAdDNc0nEHRjE-ks7YIEuDFMIE,94731
 returnn/datasets/multi_proc.py,sha256=7kppiXGiel824HM3GvHegluIxtiNAHafm-e6qh6W7YU,21948
-returnn/datasets/normalization_data.py,sha256=
+returnn/datasets/normalization_data.py,sha256=wOHrbO3612uWXpzLHHxksDw0qeVmQ42w7byBL9QMh9Q,14618
 returnn/datasets/numpy_dump.py,sha256=c2Xgn8cfWxvRNCBMraMCRuHsbmjVQ05sISlaYWIRlKg,5150
 returnn/datasets/postprocessing.py,sha256=G9QiMP3Qr0RmA1PL6fCXOUfa2e_iPzZq_Nfx_u7SNiI,19980
-returnn/datasets/raw_wav.py,sha256
+returnn/datasets/raw_wav.py,sha256=UyC4dUARb9QL0KOGhYdt96R2N_61JvFSvcyHMT8vMnw,9136
 returnn/datasets/sprint.py,sha256=_RS3IFlI5sgkLmvPqvSirWCi7-yxys_m-EY232ec8sM,55446
-returnn/datasets/stereo.py,sha256=
+returnn/datasets/stereo.py,sha256=0Df0Omm4T4r60GEFa6sEvZdgkm6keEw-qcvIO4BoJew,17617
 returnn/datasets/text_dict.py,sha256=BPE73nh6-vtSLy3SiDf4dpFl9RJorE7oO6l5y2FU3MI,9965
 returnn/datasets/util/__init__.py,sha256=rEKhSD6fyhDiQF-x7dUQMwa29JZu72SDm7mYcCcLghY,52
 returnn/datasets/util/feature_extraction.py,sha256=axtXDb9wcNpOmyhmW3WJUj5xda29TKkKvOcGGvq7ExA,23923
@@ -233,11 +233,11 @@ returnn/torch/util/gradient_checkpoint.py,sha256=iLy-FB65DC8O6LxzmMvFjnSdpIVpko8
 returnn/torch/util/module.py,sha256=MXHIrF9Isu575DDJIa81212ULKwdqu1oOLxDVZecVSk,1693
 returnn/torch/util/scaled_gradient.py,sha256=3585VuNypBty-pW6r3BKK047H3MqZQSdMjXeYAb4cmU,3192
 returnn/util/__init__.py,sha256=UIG1qw4idqhW71BV60ha7h9PktxvEVcBIu0lYRossK8,336
-returnn/util/basic.py,sha256=
+returnn/util/basic.py,sha256=nhCfxWwGL7FchgFW5x9V2OgXD0HtpN885NASdwfeKYg,142339
 returnn/util/better_exchook.py,sha256=MVMnuu6KoyqgvlMeQLQNTfdspcPR9MwigCXOpeTVqCI,62956
 returnn/util/bpe.py,sha256=LWFhICZsEOnMwNws0lybPNzKRX6rSr8yKCvP65vjl9Y,19656
 returnn/util/debug.py,sha256=wuRzdg9zB84WWCGyTjmRR_zYypu8gXxlc0nZ6si9OC8,28224
-returnn/util/debug_helpers.py,sha256=
+returnn/util/debug_helpers.py,sha256=0EINLK4uLtoSt5_kHs1M2NIFpMd0S7i4c4rx90U4fJk,2914
 returnn/util/file_cache.py,sha256=JvJ4C7NFr8WpiIN0hLk3c33oX4-JfWSpchTjY7JGpCc,23127
 returnn/util/fsa.py,sha256=k2lJ8tyf_g44Xk1EPVLwDwpP4spoMTqIigDVOWocQHY,59177
 returnn/util/literal_py_to_pickle.py,sha256=3dnjWPeeiDT2xp4bRDgIf9yddx7b1AG7mOKEn_jiSl8,2173
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
 returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
 returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
 returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
-returnn-1.
-returnn-1.
-returnn-1.
-returnn-1.
-returnn-1.
+returnn-1.20250204.4017.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+returnn-1.20250204.4017.dist-info/METADATA,sha256=6uFK26GLuffKCi8KkSeMajnlYfmBu2P7QvW9ZahkRUQ,5213
+returnn-1.20250204.4017.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+returnn-1.20250204.4017.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+returnn-1.20250204.4017.dist-info/RECORD,,
{returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/LICENSE
File without changes

{returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/WHEEL
File without changes

{returnn-1.0.0.dist-info → returnn-1.20250204.4017.dist-info}/top_level.txt
File without changes