returnn 1.20250318.201955-py3-none-any.whl → 1.20250403.110243-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of returnn might be problematic.
- returnn/PKG-INFO +1 -1
- returnn/_setup_info_generated.py +2 -2
- returnn/torch/data/extern_data.py +1 -1
- returnn/util/basic.py +1 -1
- returnn/util/better_exchook.py +14 -6
- returnn/util/lru_cache.py +1 -1
- {returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/METADATA +1 -1
- {returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/RECORD +11 -11
- {returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/LICENSE +0 -0
- {returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/WHEEL +0 -0
- {returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/top_level.txt +0 -0
returnn/PKG-INFO
CHANGED
returnn/_setup_info_generated.py
CHANGED
@@ -1,2 +1,2 @@
-version = '1.
-long_version = '1.
+version = '1.20250403.110243'
+long_version = '1.20250403.110243+git.8b510ad'
returnn/torch/data/extern_data.py
CHANGED
@@ -132,7 +132,7 @@ def raw_dict_split_batch(
             raise TypeError(f"got invalid value of type ({type(v).__name__}) for key {k!r}")
         offset = 0
         for i, split_size in enumerate(splits):
-            res[i][k] = v[offset : offset + split_size]
+            res[i][k] = v[offset : offset + split_size] if v.ndim > 0 else v
             offset += split_size
     for res_ in res:
         res_: Dict[str, Union[torch.Tensor, numpy.ndarray]]
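The extern_data.py change guards against zero-dimensional values: a 0-dim tensor cannot be sliced along a batch dim, so such values are now passed through unchanged to every split. Below is a minimal standalone sketch of that behaviour, not RETURNN's actual call path; the dict keys and shapes are made up for illustration.

    import torch

    batch = {
        "data": torch.randn(4, 7),          # batched: first dim is the batch dim
        "global_step": torch.tensor(123),   # 0-dim scalar shared by the whole batch
    }
    splits = [1, 3]

    res = [{} for _ in splits]
    for k, v in batch.items():
        offset = 0
        for i, split_size in enumerate(splits):
            # 0-dim tensors cannot be sliced; keep them as-is in every split.
            res[i][k] = v[offset : offset + split_size] if v.ndim > 0 else v
            offset += split_size

    print(res[0]["data"].shape)    # torch.Size([1, 7])
    print(res[1]["data"].shape)    # torch.Size([3, 7])
    print(res[0]["global_step"])   # tensor(123), unchanged in every split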
returnn/util/basic.py
CHANGED
@@ -590,7 +590,7 @@ class ReportImportedDevModules:
             if path not in self.ignore_sys_path:
                 print("New sys.path entry:", path, file=log.v3)
                 has_changes = True
-        for mod_name, mod in sys.modules.items():
+        for mod_name, mod in list(sys.modules.items()):
             if "." not in mod_name and mod_name not in self.ignore_sys_modules:
                 if hasattr(mod, "__file__") and mod.__file__:
                     # __file__ is e.g. ".../recipe/i6_experiments/__init__.py"
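The basic.py change wraps sys.modules.items() in list() because iterating the live dict while new modules get imported raises "RuntimeError: dictionary changed size during iteration". A small illustration of the pattern (the helper functions here are only for demonstration, not part of RETURNN):

    import sys

    # Iterating sys.modules directly can fail with
    # "RuntimeError: dictionary changed size during iteration"
    # if anything imports a new module while the loop runs
    # (e.g. lazy imports triggered by the loop body, or another thread).
    def scan_modules_unsafe():
        for mod_name, mod in sys.modules.items():
            _ = getattr(mod, "__file__", None)

    # Taking a snapshot of the items first makes the loop robust against that.
    def scan_modules_safe():
        for mod_name, mod in list(sys.modules.items()):
            _ = getattr(mod, "__file__", None)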
returnn/util/better_exchook.py
CHANGED
@@ -930,31 +930,36 @@ class _OutputLinesCollector:
         self.lines = []
         self.dom_term = DomTerm() if DomTerm.is_domterm() else None

-    def __call__(self, s1, s2=None, **kwargs):
+    def __call__(self, s1, s2=None, merge_into_prev=True, **kwargs):
         """
         Adds to self.lines.
         This strange function signature is for historical reasons.

         :param str s1:
         :param str|None s2:
+        :param bool merge_into_prev: if True and existing self.lines, merge into prev line.
         :param kwargs: passed to self.color
         """
         if kwargs:
             s1 = self.color(s1, **kwargs)
         if s2 is not None:
             s1 = add_indent_lines(s1, s2)
-        self.lines
+        if merge_into_prev and self.lines:
+            self.lines[-1] += s1 + "\n"
+        else:
+            self.lines.append(s1 + "\n")

     @contextlib.contextmanager
-    def fold_text_ctx(self, line):
+    def fold_text_ctx(self, line, merge_into_prev=True):
         """
         Folds text, via :class:`DomTerm`, if available.
         Notes that this temporarily overwrites self.lines.

         :param str line: always visible
+        :param bool merge_into_prev: if True and existing self.lines, merge into prev line.
         """
         if not self.dom_term:
-            self.__call__(line)
+            self.__call__(line, merge_into_prev=merge_into_prev)
             yield
             return
         self.lines, old_lines = [], self.lines  # overwrite self.lines
@@ -970,7 +975,10 @@ class _OutputLinesCollector:
             line = line[1:]
         self.dom_term.fold_text(line, hidden=hidden_text, file=output_buf, align=len(prefix))
         output_text = prefix[1:] + output_buf.getvalue()
-        self.lines
+        if merge_into_prev and self.lines:
+            self.lines[-1] += output_text
+        else:
+            self.lines.append(output_text)

     def _pp_extra_info(self, obj, depth_limit=3):
         """
@@ -1197,7 +1205,7 @@ def format_tb(
                 name,
             ]
         )
-        with output.fold_text_ctx(file_descr):
+        with output.fold_text_ctx(file_descr, merge_into_prev=False):
             source_code = get_source_code(filename, lineno, f.f_globals)
             if source_code:
                 source_code = remove_indent_lines(replace_tab_indents(source_code)).rstrip()
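Both __call__ and fold_text_ctx of _OutputLinesCollector now take a merge_into_prev flag: by default a newly formatted chunk is appended to the previous entry in self.lines, and format_tb passes merge_into_prev=False for each stack frame's file description so every frame starts a fresh entry. A rough stand-in sketch of just that merging logic, stripped of the color/DomTerm handling in the real class:

    class LinesCollector:
        """Minimal stand-in for _OutputLinesCollector, showing merge_into_prev only."""

        def __init__(self):
            self.lines = []

        def __call__(self, s1, merge_into_prev=True):
            if merge_into_prev and self.lines:
                self.lines[-1] += s1 + "\n"   # glue onto the previous entry
            else:
                self.lines.append(s1 + "\n")  # start a new entry

    out = LinesCollector()
    out('File "demo.py", line 1, in <module>', merge_into_prev=False)  # new frame entry
    out("    some_source_code()")                                      # merged into that entry
    out('File "demo.py", line 5, in some_source_code', merge_into_prev=False)  # next frame
    print(out.lines)  # two entries, each a frame header plus its merged details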
returnn/util/lru_cache.py
CHANGED
{returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/RECORD
CHANGED
@@ -1,9 +1,9 @@
-returnn/PKG-INFO,sha256=
+returnn/PKG-INFO,sha256=YrTbyS1dXOzjiS3hsHx0WiTGN2vC4405oTBhmzV9HFc,5215
 returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
 returnn/__main__.py,sha256=qBFbuB1yN3adgVM5pXt2-Yq9vorjRNchNPL8kDKx44M,31752
 returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
 returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
-returnn/_setup_info_generated.py,sha256=
+returnn/_setup_info_generated.py,sha256=bNn7BZihUmzjFNY4h_ZJCi_eZ9gVkXH_bdTWycY64lk,77
 returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
 returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
 returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -210,7 +210,7 @@ returnn/torch/distributed.py,sha256=skFyutdVztxgTEk3HHJ8S83qRWbNpkNT8Tj16Ic0_hE,
 returnn/torch/engine.py,sha256=2FLLb2m4sWFwYOQGREDSxQCheCKd_osnFJCdLa_4TzE,76400
 returnn/torch/updater.py,sha256=GqtBvZpElPVMm0lq84JPl4NVLFFETZAzAbR0rTomSao,28249
 returnn/torch/data/__init__.py,sha256=6cLNEi8KoGI12PF6akN7mI_mtjlx-0hcQAfMYoExwik,132
-returnn/torch/data/extern_data.py,sha256=
+returnn/torch/data/extern_data.py,sha256=OSoy3x1KiyiJCr7DfF5uPFAu09We2N2WbA0yo-pYXxM,7601
 returnn/torch/data/pipeline.py,sha256=mA6R1QU9vvRmfaUBvdqI9jQeIB3O-01ODcpmXs1SZ-w,29458
 returnn/torch/data/queued_data_iter.py,sha256=PoOsGHdHVZjTmcyfq_ZOw--P6hyfTdmAWIRGq_Z_nLM,888
 returnn/torch/data/returnn_dataset_wrapper.py,sha256=2CaDapzrlqahANuq-nyVAtv5ENHuM8A7okORwYJDisg,8006
@@ -233,15 +233,15 @@ returnn/torch/util/gradient_checkpoint.py,sha256=iLy-FB65DC8O6LxzmMvFjnSdpIVpko8
 returnn/torch/util/module.py,sha256=MXHIrF9Isu575DDJIa81212ULKwdqu1oOLxDVZecVSk,1693
 returnn/torch/util/scaled_gradient.py,sha256=3585VuNypBty-pW6r3BKK047H3MqZQSdMjXeYAb4cmU,3192
 returnn/util/__init__.py,sha256=UIG1qw4idqhW71BV60ha7h9PktxvEVcBIu0lYRossK8,336
-returnn/util/basic.py,sha256=
-returnn/util/better_exchook.py,sha256=
+returnn/util/basic.py,sha256=rzTfLAkX5IdC-L3xImNrSTRtB1dWRMHzrCridqrWXKI,142386
+returnn/util/better_exchook.py,sha256=TAtb_ZyM-357UnOg_HMoBZUSxzt0WPgumlvprmlCprA,63921
 returnn/util/bpe.py,sha256=LWFhICZsEOnMwNws0lybPNzKRX6rSr8yKCvP65vjl9Y,19656
 returnn/util/debug.py,sha256=wuRzdg9zB84WWCGyTjmRR_zYypu8gXxlc0nZ6si9OC8,28224
 returnn/util/debug_helpers.py,sha256=0EINLK4uLtoSt5_kHs1M2NIFpMd0S7i4c4rx90U4fJk,2914
 returnn/util/file_cache.py,sha256=JvJ4C7NFr8WpiIN0hLk3c33oX4-JfWSpchTjY7JGpCc,23127
 returnn/util/fsa.py,sha256=k2lJ8tyf_g44Xk1EPVLwDwpP4spoMTqIigDVOWocQHY,59177
 returnn/util/literal_py_to_pickle.py,sha256=3dnjWPeeiDT2xp4bRDgIf9yddx7b1AG7mOKEn_jiSl8,2173
-returnn/util/lru_cache.py,sha256=
+returnn/util/lru_cache.py,sha256=7Q5H3a8b07E8e1iB7PA9jCpRnxMJZOFS2KO07cy0gqk,11446
 returnn/util/math.py,sha256=ximPqNsv0Wu6VNcCLqNfsmSu1s-VPsAJYt5nEvFZVtY,6691
 returnn/util/multi_proc_non_daemonic_spawn.py,sha256=YCW3Gry0RJ9Dsc5bKfZ77Q06eLjq6winGniYllJE7PU,9057
 returnn/util/native_code_compiler.py,sha256=T6eZwzNA7AnkpNpo61AbYVNVtKqdWBbQfvJEQVe-cHE,13172
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
 returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
 returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
 returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
-returnn-1.
-returnn-1.
-returnn-1.
-returnn-1.
-returnn-1.
+returnn-1.20250403.110243.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+returnn-1.20250403.110243.dist-info/METADATA,sha256=YrTbyS1dXOzjiS3hsHx0WiTGN2vC4405oTBhmzV9HFc,5215
+returnn-1.20250403.110243.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+returnn-1.20250403.110243.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+returnn-1.20250403.110243.dist-info/RECORD,,
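For reference, the hash column in RECORD is the urlsafe-base64-encoded SHA-256 digest of the file with trailing "=" padding stripped, followed by the file size in bytes (per the wheel RECORD format). A small sketch of how such an entry could be recomputed to verify a file; the path used is illustrative:

    import base64
    import hashlib

    def record_entry(path):
        # RECORD lines look like: <path>,sha256=<urlsafe-b64 digest, no padding>,<size>
        with open(path, "rb") as f:
            data = f.read()
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
        return f"{path},sha256={digest},{len(data)}"

    # Example (hypothetical path inside an unpacked wheel):
    # print(record_entry("returnn/_setup_info_generated.py"))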
{returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/LICENSE
File without changes
{returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/WHEEL
File without changes
{returnn-1.20250318.201955.dist-info → returnn-1.20250403.110243.dist-info}/top_level.txt
File without changes