returnn-1.20250515.200041-py3-none-any.whl → returnn-1.20250521.105128-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of returnn might be problematic.
- returnn/PKG-INFO +1 -1
- returnn/__main__.py +7 -7
- returnn/_setup_info_generated.py +2 -2
- returnn/datasets/lm.py +8 -5
- returnn/util/basic.py +4 -5
- {returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/METADATA +1 -1
- {returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/RECORD +10 -10
- {returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/LICENSE +0 -0
- {returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/WHEEL +0 -0
- {returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/top_level.txt +0 -0
returnn/PKG-INFO
CHANGED
returnn/__main__.py
CHANGED

@@ -34,21 +34,21 @@ from returnn.util.basic import BackendEngine, BehaviorVersion

 # These imports are not directly used here, but make them available, as other code imports them from here.
 # noinspection PyUnresolvedReferences
-from returnn.util.debug import init_ipython_kernel, init_better_exchook, init_faulthandler, debug_shell
+from returnn.util.debug import init_ipython_kernel, init_better_exchook, init_faulthandler, debug_shell  # noqa: F401

 # Some external scripts import those functions from here, thus keep this here.
 # noinspection PyUnresolvedReferences
-from returnn.util.basic import init_thread_join_hack, describe_returnn_version
+from returnn.util.basic import init_thread_join_hack, describe_returnn_version  # noqa: F401

 if TYPE_CHECKING:
     import returnn.tf.engine
     import returnn.torch.engine

-config
-engine
-train_data
-dev_data
-eval_data
+config: Optional[Config] = None
+engine: Optional[Union[returnn.tf.engine.Engine, returnn.torch.engine.Engine]] = None
+train_data: Optional[Dataset] = None
+dev_data: Optional[Dataset] = None
+eval_data: Optional[Dataset] = None
 quit_returnn = False

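The `# noqa: F401` markers and the typed module-level globals above follow two common patterns: imports kept purely as re-exports for external scripts (flake8 would otherwise flag them as unused, error F401), and globals declared with a type annotation and a `None` default that are filled in during startup. A minimal sketch of both patterns, using hypothetical module and function names rather than RETURNN's real ones:

    # re_export_demo.py -- hypothetical module, not part of RETURNN.
    # Keep an import only so other code can do `from re_export_demo import sqrt`;
    # "# noqa: F401" tells flake8 not to report it as an unused import.
    from math import sqrt  # noqa: F401

    from typing import Optional

    # Typed module-level global, set later during initialization,
    # in the same style as config/engine/... in returnn/__main__.py.
    config: Optional[dict] = None


    def init(config_dict: dict) -> None:
        """Populate the module-level global during startup."""
        global config
        config = config_dict

Annotating the globals instead of leaving bare names gives type checkers and IDEs the intended types without changing runtime behaviour.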
returnn/_setup_info_generated.py
CHANGED

@@ -1,2 +1,2 @@
-version = '1.
-long_version = '1.
+version = '1.20250521.105128'
+long_version = '1.20250521.105128+git.57d7340'

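The generated strings encode a timestamp-based version plus, in `long_version`, a git revision suffix. A small sketch that splits such a string into date, time, and revision; the `1.YYYYMMDD.HHMMSS+git.<rev>` layout is only inferred from the two generated lines above, not from any documented API:

    # Split a version string like "1.20250521.105128+git.57d7340" into its parts.
    from datetime import datetime

    long_version = "1.20250521.105128+git.57d7340"

    base, _, git_suffix = long_version.partition("+")      # "1.20250521.105128", "git.57d7340"
    _, date_part, time_part = base.split(".")               # "20250521", "105128"
    built_at = datetime.strptime(date_part + time_part, "%Y%m%d%H%M%S")
    revision = git_suffix.removeprefix("git.")              # "57d7340" (needs Python 3.9+)

    print(built_at.isoformat(), revision)                   # 2025-05-21T10:51:28 57d7340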
returnn/datasets/lm.py
CHANGED

@@ -85,6 +85,7 @@ class LmDataset(CachedDataset2):
         add_delayed_seq_data=False,
         delayed_seq_data_start_symbol="[START]",
         dtype: Optional[str] = None,
+        tag_prefix: Optional[str] = None,
         **kwargs,
     ):
         """

@@ -288,7 +289,9 @@ class LmDataset(CachedDataset2):
         self.num_outputs = {"data": [num_labels, 1]}
         self.num_inputs = num_labels
         self.seq_order = None
-
+
+        # sequence tag is "line-n", where n is the line number (to be compatible with translation)
+        self.tag_prefix = tag_prefix or "line-"
         self.auto_replace_unknown_symbol = auto_replace_unknown_symbol
         self.log_auto_replace_unknown_symbols = log_auto_replace_unknown_symbols
         self.log_skipped_seqs = log_skipped_seqs

@@ -504,8 +507,8 @@ class LmDataset(CachedDataset2):
         elif seq_list is not None:
             # Might not be initialized. Can even do without init. Thus check seq_list_file.
             if self._seq_list_file is None:
-                assert all(s.startswith(self.
-                self.seq_order = [int(s[len(self.
+                assert all(s.startswith(self.tag_prefix) for s in seq_list)
+                self.seq_order = [int(s[len(self.tag_prefix) :]) for s in seq_list]
             else:
                 # Need seq list for this. Just do the lazy init now.
                 self._lazy_init()

@@ -555,7 +558,7 @@ class LmDataset(CachedDataset2):
        if self._seq_list is not None:
            return self._seq_list
        num_seqs = self.get_total_num_seqs()
-       return [self.
+       return [self.tag_prefix + str(line_nr) for line_nr in range(num_seqs)]

    def _reduce_log_skipped_seqs(self):
        if isinstance(self.log_skipped_seqs, bool):

@@ -594,7 +597,7 @@ class LmDataset(CachedDataset2):
        idx, offset, len_ = self._orths_offsets_and_lens[true_idx]
        orth = self._orth_mmaps[idx][offset : offset + len_].decode("utf8").strip()
        if self._seq_list is None:
-           seq_tag = self.
+           seq_tag = self.tag_prefix + str(true_idx)
        else:
            seq_tag = self._seq_list[true_idx]
        self.next_orth_idx += 1

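The new `tag_prefix` keyword argument makes the sequence-tag prefix configurable while keeping the previous `"line-<n>"` scheme as the default (`tag_prefix or "line-"`). A tiny illustration of the tag scheme as it appears in the diff; it only mirrors the expressions above and does not call into RETURNN:

    # Tag scheme used by LmDataset after this change (pure illustration).
    tag_prefix = "line-"  # the default; a caller could pass e.g. tag_prefix="train-" to LmDataset
    num_seqs = 3

    # get_all_tags(): tags are tag_prefix + str(line_nr)
    seq_tags = [tag_prefix + str(line_nr) for line_nr in range(num_seqs)]   # ['line-0', 'line-1', 'line-2']

    # init_seq_order() with a seq_list: line numbers are recovered by stripping the prefix
    seq_order = [int(s[len(tag_prefix):]) for s in seq_tags]
    assert seq_order == [0, 1, 2]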
returnn/util/basic.py
CHANGED

@@ -1677,17 +1677,16 @@ def random_orthogonal(shape, gain=1.0, seed=None):


 # noinspection PyUnusedLocal
-def inplace_increment(x, idx, y):
+def inplace_increment(x: numpy.ndarray, idx: numpy.ndarray, y: Union[numpy.ndarray, float, int]) -> numpy.ndarray:
     """
     This basically does `x[idx] += y`.
     The difference to the Numpy version is that in case some index is there multiple
     times, it will only be incremented once (and it is not specified which one).
     See also theano.tensor.subtensor.AdvancedIncSubtensor documentation.

-    :param
-    :param
-    :param
-    :rtype: numpy.ndarray
+    :param x:
+    :param idx:
+    :param y:
     """
     raise NotImplementedError("This feature was removed with dropped Theano support")

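The docstring above contrasts the removed Theano-backed helper with NumPy's own behaviour for repeated indices. Plain NumPy shows both variants; this example is independent of the removed `inplace_increment` and only illustrates the duplicate-index point:

    import numpy

    x = numpy.zeros(3)
    idx = numpy.array([0, 0, 1])   # index 0 appears twice

    a = x.copy()
    a[idx] += 1                    # buffered fancy indexing: each distinct index incremented once
    print(a)                       # [1. 1. 0.]

    b = x.copy()
    numpy.add.at(b, idx, 1)        # unbuffered: repeated indices accumulate
    print(b)                       # [2. 1. 0.]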

{returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/RECORD
CHANGED

@@ -1,9 +1,9 @@
-returnn/PKG-INFO,sha256
+returnn/PKG-INFO,sha256=9FIsKQntzHycJxh5W0elKEkWr68gbK3bh6hOmYiY2gk,5215
 returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
-returnn/__main__.py,sha256=
+returnn/__main__.py,sha256=lHyZcu_0yc9f7Vf_Kfdy9PmeU0T76XVXnpalHi5WKro,31740
 returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
 returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
-returnn/_setup_info_generated.py,sha256=
+returnn/_setup_info_generated.py,sha256=fRT-AuqUKrqoSgbmGlg_6qxAX0iBoVGsUA6jkyc4BvQ,77
 returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
 returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
 returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684

@@ -20,7 +20,7 @@ returnn/datasets/cached2.py,sha256=_6pza3IG68JexaExhj1ld3fP6pE7T-G804driJ9Z_qo,1
 returnn/datasets/distrib_files.py,sha256=9-3pJaF8Ws1Cs4AlelFCODz6b5YiaTsrD7tMCB76PDY,29865
 returnn/datasets/generating.py,sha256=9U_w6URIrv-Rb-hDbPOzYW9qYXzJbw32N6G268IKyoM,99833
 returnn/datasets/hdf.py,sha256=v5sjBenURR9Z-g7AQ9tsL84yDSye5RtbLpym3M6HSDE,67833
-returnn/datasets/lm.py,sha256=
+returnn/datasets/lm.py,sha256=IqUsOzbdSWUynL0YFL25HbtMR4AxaQGHvjjqRE9IwBo,99215
 returnn/datasets/map.py,sha256=kOBJVZmwDhLsOplzDNByIfa0NRSUaMo2Lsy36lBvxrM,10907
 returnn/datasets/meta.py,sha256=KQtidTgSh-1gNgbpJ8OhXt6v2lkhPPH5dpjfzwsr3E4,95251
 returnn/datasets/multi_proc.py,sha256=aVjsLt2qjHnHOrEYCgIPCwNYE-f1fiGP6eZ8NGAr3A4,22583

@@ -233,7 +233,7 @@ returnn/torch/util/gradient_checkpoint.py,sha256=iLy-FB65DC8O6LxzmMvFjnSdpIVpko8
 returnn/torch/util/module.py,sha256=MXHIrF9Isu575DDJIa81212ULKwdqu1oOLxDVZecVSk,1693
 returnn/torch/util/scaled_gradient.py,sha256=C5e79mpqtxdtw08OTSy413TSBSlOertRisc-ioiFIaU,3191
 returnn/util/__init__.py,sha256=UIG1qw4idqhW71BV60ha7h9PktxvEVcBIu0lYRossK8,336
-returnn/util/basic.py,sha256=
+returnn/util/basic.py,sha256=Ep67bFPbxiaMKgsjrUqF0seoswghAqLsUQYcpgQGeyE,142570
 returnn/util/better_exchook.py,sha256=98XnUZIWpYN7NfklSGt_5hYNplADVFQnh857esKxjdI,64475
 returnn/util/bpe.py,sha256=LWFhICZsEOnMwNws0lybPNzKRX6rSr8yKCvP65vjl9Y,19656
 returnn/util/debug.py,sha256=wuRzdg9zB84WWCGyTjmRR_zYypu8gXxlc0nZ6si9OC8,28224

@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
 returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
 returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
 returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
-returnn-1.
-returnn-1.
-returnn-1.
-returnn-1.
-returnn-1.
+returnn-1.20250521.105128.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+returnn-1.20250521.105128.dist-info/METADATA,sha256=9FIsKQntzHycJxh5W0elKEkWr68gbK3bh6hOmYiY2gk,5215
+returnn-1.20250521.105128.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+returnn-1.20250521.105128.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+returnn-1.20250521.105128.dist-info/RECORD,,

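Each RECORD line above has the form `path,sha256=<digest>,<size>`, where the digest is the urlsafe base64 encoding of the file's SHA-256 hash with trailing `=` padding stripped and the size is in bytes (the wheel RECORD format). A small sketch that recomputes such an entry for one file of an unpacked wheel; the path used is just an example:

    # Recompute a RECORD-style entry (path,sha256=<urlsafe-b64 digest without padding>,<size>).
    import base64
    import hashlib
    from pathlib import Path

    def record_entry(path: str) -> str:
        data = Path(path).read_bytes()
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode("ascii")
        return f"{path},sha256={digest},{len(data)}"

    # Example usage (run from the unpacked wheel root; path is illustrative):
    # print(record_entry("returnn/config.py"))
    # Expected to match: returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140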
{returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/LICENSE
File without changes

{returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/WHEEL
File without changes

{returnn-1.20250515.200041.dist-info → returnn-1.20250521.105128.dist-info}/top_level.txt
File without changes