returnn 1.20250423.155627-py3-none-any.whl → 1.20250425.85727-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of returnn might be problematic.
- returnn/PKG-INFO +1 -1
- returnn/_setup_info_generated.py +2 -2
- returnn/datasets/util/vocabulary.py +6 -0
- returnn/torch/data/extern_data.py +4 -0
- returnn/torch/engine.py +2 -0
- {returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/METADATA +1 -1
- {returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/RECORD +10 -10
- {returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/LICENSE +0 -0
- {returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/WHEEL +0 -0
- {returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/top_level.txt +0 -0
returnn/PKG-INFO
CHANGED
returnn/_setup_info_generated.py
CHANGED
@@ -1,2 +1,2 @@
-version = '1.20250423.155627'
-long_version = '1.20250423.155627+git.…'
+version = '1.20250425.085727'
+long_version = '1.20250425.085727+git.547e726'
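The generated module carries only these two strings, so the installed build can be checked by importing it directly. A minimal sketch, assuming the wheel is installed; the attribute names are taken from the diff above:

```python
# Read the version strings written into the wheel by the build step.
from returnn import _setup_info_generated

print(_setup_info_generated.version)       # e.g. '1.20250425.085727'
print(_setup_info_generated.long_version)  # e.g. '1.20250425.085727+git.547e726'
```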
returnn/datasets/util/vocabulary.py
CHANGED

@@ -505,7 +505,13 @@ class SentencePieces(Vocabulary):
         """
         import sentencepiece as spm  # noqa
 
+        opts = opts.copy()
+        for k in ["model_file", "model_proto"]:
+            if k in opts:
+                # Make sure it is a string. (Could be e.g. Sis Path.)
+                opts[k] = str(opts[k])
         self._opts = opts
+        opts = opts.copy()
         self._cache_key = opts.get("model_file", None)
         control_symbols = opts.pop("control_symbols", None)
         user_defined_symbols = opts.pop("user_defined_symbols", None)
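The added block normalizes path-like option values (the inline comment mentions e.g. a Sisyphus Path) to plain strings before they are stored in `self._opts` and later handed to `sentencepiece`. A standalone sketch of the same coercion, using `pathlib.Path` and a hypothetical opts dict purely for illustration:

```python
from pathlib import Path

# Hypothetical opts dict, standing in for what a config might hand to SentencePieces;
# pathlib.Path is used here in place of a Sisyphus Path object.
opts = {"model_file": Path("/data/spm/bpe10k.model"), "enable_sampling": True}

# Same normalization as the added block above: coerce path-like values to str,
# since the sentencepiece bindings expect plain strings.
opts = opts.copy()
for k in ["model_file", "model_proto"]:
    if k in opts:
        opts[k] = str(opts[k])

assert isinstance(opts["model_file"], str)
print(opts["model_file"])  # "/data/spm/bpe10k.model"
```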
returnn/torch/data/extern_data.py
CHANGED

@@ -28,12 +28,14 @@ def raw_dict_to_extern_data(
     extern_data_template: TensorDict,
     device: Union[str, torch.device],
     float_dtype: Optional[Union[str, torch.dtype]] = None,
+    with_eval_targets: bool = False,
 ) -> TensorDict:
     """
     :param extern_data_raw: This comes out of the DataLoader, via our collate_batch.
     :param extern_data_template: Specified via `extern_data` in the config.
     :param device: E.g. the GPU.
     :param float_dtype:
+    :param with_eval_targets: if False, we skip all tensors with ``available_for_inference=False``.
     :return: tensor dict, like extern_data_template, but with raw tensors set to Torch tensors, on the right device.
     """
     if isinstance(float_dtype, str):
@@ -47,6 +49,8 @@ def raw_dict_to_extern_data(
     batch_dim.dyn_size_ext = Tensor(batch_dim.name or "batch", dims=[], dtype="int32")
     extern_data = TensorDict()
     for k, data in extern_data_template.data.items():
+        if not with_eval_targets and not data.available_for_inference:
+            continue
         data = data.copy_template()
         raw_tensor = extern_data_raw[k]
         assert len(raw_tensor.shape) == data.batch_ndim, f"ndim mismatch for {k}: {raw_tensor.shape} vs {data}"
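`with_eval_targets` defaults to `False`, so the conversion now skips any template entry marked `available_for_inference=False` unless the caller opts back in. A self-contained sketch of that skip logic; the `Entry` class and key names are illustrative stand-ins, not RETURNN's `Tensor`/`TensorDict` types:

```python
from dataclasses import dataclass
from typing import Dict, List


@dataclass
class Entry:
    """Illustrative stand-in for a tensor template carrying the relevant flag."""
    available_for_inference: bool = True


def selected_keys(template: Dict[str, Entry], with_eval_targets: bool = False) -> List[str]:
    """Return the keys that would still be converted under the new filtering."""
    keys = []
    for k, data in template.items():
        if not with_eval_targets and not data.available_for_inference:
            continue  # e.g. skip target entries during pure inference
        keys.append(k)
    return keys


template = {"data": Entry(True), "classes": Entry(False)}
assert selected_keys(template) == ["data"]
assert selected_keys(template, with_eval_targets=True) == ["data", "classes"]
```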
returnn/torch/engine.py
CHANGED
@@ -435,6 +435,7 @@ class Engine(EngineBase):
                 extern_data_template=self.extern_data,
                 device=self._device,
                 float_dtype=self._default_float_dtype,
+                with_eval_targets=True,
             )
             self._run_step(extern_data, train_flag=True, train_func=True)
 
@@ -660,6 +661,7 @@ class Engine(EngineBase):
                 extern_data_template=self.extern_data,
                 device=self._device,
                 float_dtype=self._default_float_dtype,
+                with_eval_targets=True,
             )
 
             self._run_step(extern_data, train_func=True)
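Both call sites sit in the Torch Engine's train/eval step loops, which still need the target tensors, so they now pass `with_eval_targets=True` explicitly; call sites that keep the new default of `False` (e.g. pure forward/inference) avoid converting and moving targets that are not available for inference.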
{returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/RECORD
CHANGED

@@ -1,9 +1,9 @@
-returnn/PKG-INFO,sha256=…
+returnn/PKG-INFO,sha256=d21Lj9SyyBGpYI76EmdiZA-0o461Y9Obq0xdEUtuym0,5214
 returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
 returnn/__main__.py,sha256=qBFbuB1yN3adgVM5pXt2-Yq9vorjRNchNPL8kDKx44M,31752
 returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
 returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
-returnn/_setup_info_generated.py,sha256=…
+returnn/_setup_info_generated.py,sha256=mn_I1EXYlj19uyaatMxZYx7k2X5fwxbUzLRPoDt_juI,77
 returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
 returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
 returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -34,7 +34,7 @@ returnn/datasets/text_dict.py,sha256=BPE73nh6-vtSLy3SiDf4dpFl9RJorE7oO6l5y2FU3MI
 returnn/datasets/util/__init__.py,sha256=rEKhSD6fyhDiQF-x7dUQMwa29JZu72SDm7mYcCcLghY,52
 returnn/datasets/util/feature_extraction.py,sha256=axtXDb9wcNpOmyhmW3WJUj5xda29TKkKvOcGGvq7ExA,23923
 returnn/datasets/util/strings.py,sha256=Xg-Nt2mI5Gi7Eb1bER1bmkZJdQg6QhnMANZOf1IzzJ4,413
-returnn/datasets/util/vocabulary.py,sha256=…
+returnn/datasets/util/vocabulary.py,sha256=adeqbfgiVEqifdgkfIa3XIWfiCl3XW0dgJLCFDb-LG0,27180
 returnn/engine/__init__.py,sha256=br7hpn8i_hIBi2uTQfnN3BF9g5DREYa_mQi0_Nvlu6o,228
 returnn/engine/base.py,sha256=0n4FtB_B2H3W_9KdoLr0P7YPER-hVkbk69pwFqsqmqw,18467
 returnn/engine/batch.py,sha256=amXW8mGspuSQjo00JdisE2eOLy5Ij1weWWzkE-lXSJM,9912
@@ -207,10 +207,10 @@ returnn/tf/util/open_fst.py,sha256=sZRDw4TbxvhGqpGdUJWy1ebvlZm4_RPhygpRw9uLAOQ,1
 returnn/torch/README.md,sha256=jzJ2FpOHW02vxN69yKaV97C9LI-hmvjBglKfdZXIDdc,85
 returnn/torch/__init__.py,sha256=MHEUyNHB20Vy89uKAqZoj6FxJKF1Gq3HW-i6ra1pNcI,24
 returnn/torch/distributed.py,sha256=skFyutdVztxgTEk3HHJ8S83qRWbNpkNT8Tj16Ic0_hE,6981
-returnn/torch/engine.py,sha256=…
+returnn/torch/engine.py,sha256=7vFqhESIR_0icesKyJ04CpaqqaFHUlWy0d64SjZMErE,77925
 returnn/torch/updater.py,sha256=GqtBvZpElPVMm0lq84JPl4NVLFFETZAzAbR0rTomSao,28249
 returnn/torch/data/__init__.py,sha256=6cLNEi8KoGI12PF6akN7mI_mtjlx-0hcQAfMYoExwik,132
-returnn/torch/data/extern_data.py,sha256=…
+returnn/torch/data/extern_data.py,sha256=zEoezCRg9JMf53KZJmWpzHspyAS9M8X3jEg4SmHSzko,7830
 returnn/torch/data/pipeline.py,sha256=mA6R1QU9vvRmfaUBvdqI9jQeIB3O-01ODcpmXs1SZ-w,29458
 returnn/torch/data/queued_data_iter.py,sha256=PoOsGHdHVZjTmcyfq_ZOw--P6hyfTdmAWIRGq_Z_nLM,888
 returnn/torch/data/returnn_dataset_wrapper.py,sha256=2CaDapzrlqahANuq-nyVAtv5ENHuM8A7okORwYJDisg,8006
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
 returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
 returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
 returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
-returnn-1.20250423.155627.dist-info/…
-returnn-1.20250423.155627.dist-info/…
-returnn-1.20250423.155627.dist-info/…
-returnn-1.20250423.155627.dist-info/…
-returnn-1.20250423.155627.dist-info/…
+returnn-1.20250425.85727.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+returnn-1.20250425.85727.dist-info/METADATA,sha256=d21Lj9SyyBGpYI76EmdiZA-0o461Y9Obq0xdEUtuym0,5214
+returnn-1.20250425.85727.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+returnn-1.20250425.85727.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+returnn-1.20250425.85727.dist-info/RECORD,,
{returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/LICENSE
File without changes

{returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/WHEEL
File without changes

{returnn-1.20250423.155627.dist-info → returnn-1.20250425.85727.dist-info}/top_level.txt
File without changes