returnn 1.20250223.154045-py3-none-any.whl → 1.20250225.201207-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of returnn might be problematic.

returnn/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: returnn
- Version: 1.20250223.154045
+ Version: 1.20250225.201207
  Summary: The RWTH extensible training framework for universal recurrent neural networks
  Home-page: https://github.com/rwth-i6/returnn/
  Author: Albert Zeyer
@@ -1,2 +1,2 @@
- version = '1.20250223.154045'
- long_version = '1.20250223.154045+git.354cf31'
+ version = '1.20250225.201207'
+ long_version = '1.20250225.201207+git.c7cfe6c'
@@ -939,8 +939,8 @@ class Backend(Generic[T]):
      raise NotImplementedError

  @staticmethod
- def flip(source: Tensor, *, axis: Dim) -> Tensor:
-     """flip"""
+ def flip_no_mask(source: Tensor, *, axis: Dim) -> Tensor:
+     """flip, ignoring masking"""
      raise NotImplementedError

  @staticmethod
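The rename from flip to flip_no_mask makes explicit that this backend primitive reverses the whole (padded) axis and does not respect sequence masking. A minimal illustration of why a raw flip is unaware of padding, using plain PyTorch with made-up values:

    import torch

    x = torch.tensor([[1, 2, 3, 0, 0]])   # length-3 sequence padded to length 5
    print(torch.flip(x, dims=[1]))
    # tensor([[0, 0, 3, 2, 1]])  <- padding ends up in front; masking is ignored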
@@ -176,6 +176,12 @@ def merge_dims(
  :param out_dim:
  :return: tensor, out_dim
  """
+ if not dims:
+     if out_dim:
+         assert out_dim.dimension == 1
+     else:
+         out_dim = Dim(1, name="ext")
+     return rf.expand_dim(source, out_dim), out_dim
  # noinspection PyProtectedMember
  return source._raw_backend.merge_dims(source, dims=dims, out_dim=out_dim)

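The new empty-dims branch in merge_dims means that merging zero dims now behaves like adding a fresh static dim of size 1 (via rf.expand_dim) instead of falling through to the backend. In raw-tensor terms this is roughly equivalent to the following PyTorch reshape (shapes are only an example):

    import torch

    x = torch.randn(2, 3)
    # merging an empty list of dims corresponds to adding a new axis of size 1
    y = x.reshape(*x.shape, 1)
    assert y.shape == (2, 3, 1)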
@@ -1060,7 +1066,7 @@ def reverse_sequence(tensor: Tensor, *, axis: Dim, handle_dynamic_dims: bool = T
  """
  if not handle_dynamic_dims or not axis.need_masking():
      # noinspection PyProtectedMember
-     return tensor._raw_backend.flip(tensor, axis=axis)
+     return tensor._raw_backend.flip_no_mask(tensor, axis=axis)
  indices = rf.combine_bc(axis.get_size_tensor(), "-", rf.range_over_dim(axis)) - 1
  return rf.gather(tensor, indices=indices, axis=axis, clip_to_valid=True)

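With the backend flip now explicitly mask-unaware, reverse_sequence keeps the gather-based path for dynamically sized axes: every position t is mapped to seq_len - 1 - t and clipped to valid indices, so only the valid part of each sequence is reversed. A rough PyTorch sketch of that indexing (batch, lengths, and values are made up for illustration):

    import torch

    x = torch.arange(10).reshape(2, 5)                   # padded batch, max length 5
    seq_lens = torch.tensor([3, 5])                      # per-sequence lengths
    t = torch.arange(5)
    idx = (seq_lens.unsqueeze(1) - 1 - t).clamp(min=0)   # seq_len - 1 - t, clipped to valid range
    reversed_x = torch.gather(x, 1, idx)                 # valid prefix reversed; padded positions stay clipped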
@@ -692,7 +692,7 @@ class ReturnnLayersBackend(Backend[Layer]):
      )

  @staticmethod
- def flip(source: Tensor, *, axis: Dim) -> Tensor:
+ def flip_no_mask(source: Tensor, *, axis: Dim) -> Tensor:
      """flip"""
      return rfl.make_layer(
          {"class": "slice", "from": source, "axis": axis, "out_dim": axis, "slice_step": -1}, name="flip"
@@ -1185,8 +1185,8 @@ class TorchBackend(Backend[torch.Tensor]):
      return out

  @staticmethod
- def flip(source: Tensor, *, axis: Dim) -> Tensor:
-     """flip"""
+ def flip_no_mask(source: Tensor, *, axis: Dim) -> Tensor:
+     """flip, ignoring masking"""
      axis_int = source.get_axis_from_description(axis, allow_int=False)
      out = source.copy_template("flip")
      out.raw_tensor = torch.flip(source.raw_tensor, [axis_int])
@@ -1224,6 +1224,8 @@ class TorchBackend(Backend[torch.Tensor]):
  @staticmethod
  def sort(source: Tensor, *, axis: Dim, descending: bool, stable: bool) -> Tuple[Tensor, Tensor, Dim]:
      """sort. return values and indices"""
+     if axis.need_masking():
+         raise NotImplementedError(f"sort: dynamic axis {axis} not supported")
      axis_int = source.get_axis_from_description(axis, allow_int=False)
      # Move to last axis. Should be more efficient.
      source = source.copy_move_axis(axis_int, -1)
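The need_masking() guard added to sort makes an existing limitation explicit: sorting along a dynamically sized (padded) axis would mix padding values into the result, so it now fails fast instead of silently returning wrong values. A small PyTorch illustration of the underlying problem (values and lengths are made up):

    import torch

    x = torch.tensor([[3., 1., 2.], [5., 4., 0.]])  # second row has only 2 valid entries; 0. is padding
    print(torch.sort(x, dim=-1).values)
    # tensor([[1., 2., 3.],
    #         [0., 4., 5.]])   <- the padding value sorts ahead of the valid entries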
@@ -1501,7 +1503,7 @@ class TorchBackend(Backend[torch.Tensor]):
  mask = source.get_sequence_mask_broadcast(dim)
  source.raw_tensor = torch.where(mask, source.raw_tensor, mask_value)
  func = getattr(torch, mode)
- if not res_dims:
+ if not res_dims and mode != "logsumexp":  # logsumexp requires dim arg
      raw_result = func(source.raw_tensor)
  elif len(raw_dims) == 1:
      raw_result = func(source.raw_tensor, dim=raw_dims[0])
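The reduce change accounts for torch.logsumexp having no all-elements overload: unlike torch.sum or torch.max, it always requires a dim argument, so the full reduction now passes the dims explicitly for that mode. For example:

    import torch

    x = torch.randn(3, 4)
    torch.sum(x)                     # fine: reduces over all elements without a dim argument
    torch.logsumexp(x, dim=(0, 1))   # dim is required; torch.logsumexp(x) raises a TypeError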
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: returnn
- Version: 1.20250223.154045
+ Version: 1.20250225.201207
  Summary: The RWTH extensible training framework for universal recurrent neural networks
  Home-page: https://github.com/rwth-i6/returnn/
  Author: Albert Zeyer
@@ -1,9 +1,9 @@
- returnn/PKG-INFO,sha256=osW4TLxe1IbJ9J1E7K31tPJAgyAAEoVhILW6twz1beg,5215
+ returnn/PKG-INFO,sha256=MlICTMbISeiq6sz_1NI8XnxGZLlQzE0eH9VfGGDjlKs,5215
  returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
  returnn/__main__.py,sha256=qBFbuB1yN3adgVM5pXt2-Yq9vorjRNchNPL8kDKx44M,31752
  returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
  returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
- returnn/_setup_info_generated.py,sha256=ZkDu5IsuZj9TiGVIFC5h52LbiTy4KoJCJD-MAi4ZWmc,77
+ returnn/_setup_info_generated.py,sha256=DwJubgEQQUSxnLSgr9-UFixkYOeM2bKYKKxyIW_3L3w,77
  returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
  returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
  returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -75,12 +75,12 @@ returnn/extern/graph_editor/subgraph.py,sha256=R3uIFqWgiL7L5S4YATm9o9a3wfEa_mSb4
  returnn/extern/graph_editor/transform.py,sha256=d9fEgu0JC342q0g9niVxRWMKzkQQA9mrrajBGcU1o_s,29349
  returnn/extern/graph_editor/util.py,sha256=QMrQeQZ7lJwsrNQub9tof0h3quEaoHiGJaZmogQ7jXE,18707
  returnn/frontend/__init__.py,sha256=2aS7nbxXniIrBp2DODl0xN0f3IJ_dX4Bi9ZlR7W5_DE,1472
- returnn/frontend/_backend.py,sha256=lRAtOT0oAkgc_WGYBUviGbgIH3Yet6D17sjlEJH56Pg,50327
+ returnn/frontend/_backend.py,sha256=VWTe2ps8UK9BQpbnZRqNfbesQ6PGH5WHqkEa4ai8btw,50353
  returnn/frontend/_cache.py,sha256=JAhi7L-raQ3A-NC3JUYDtdRTwT3BGJJGGZxrZ8MfEWQ,8403
  returnn/frontend/_numpy_backend.py,sha256=2oCtG0YCWL_89v4cD_jDj8em1O_Fp-_YWl5EblGi_yo,7858
  returnn/frontend/_random_journal.py,sha256=_ktP_mjgx8vtQQGX_DofdhewJj0aPiczefTWeemPkmo,5457
  returnn/frontend/_utils.py,sha256=4A3MSRM0i86J77550uR_AjcBEPu6nymLUZ9Xd1V3Fkc,12073
- returnn/frontend/array_.py,sha256=x_OSKQ_WyUFqKWEJdf3dHc6bfifvkV_aiVsmaZVCEv0,47816
+ returnn/frontend/array_.py,sha256=CYk8lQinS2EDINBttl4UqSYP2BhqikeSjnbNy9Mzpx4,48013
  returnn/frontend/attention.py,sha256=GKt-Xqnz8sIyXVrE0i4VCS7J2Wu7dmoH_BA0Cu8CrXQ,45769
  returnn/frontend/backend.py,sha256=iQ9w4xl8Ea7bgpb0VUaCKq50rV5Bl2E5J8Rhd-oqD_c,883
  returnn/frontend/build_from_dict.py,sha256=rfWa2rjjhIR_kIQED_nMrygrQBunS6unegzWTLVbC98,3017
@@ -177,7 +177,7 @@ returnn/tf/sprint.py,sha256=Yqjh0-6sCWHpdDPQCzHKx7TwQCOjJyjfd0KHtnYdd-8,5471
  returnn/tf/updater.py,sha256=St4Z5iBjlkWaB6CiS-K1VNc_iLaan2e6-mVMTTPldzk,72034
  returnn/tf/frontend_layers/README.md,sha256=P4vVl_EK-4jT55m40mq-K4Nr9yFY0tJR5fmDzTHSDFE,1096
  returnn/tf/frontend_layers/__init__.py,sha256=MGUn7rv6fOefbtkX-5pq6fC1T6Y5h0oh1uOPSEcv1_I,506
- returnn/tf/frontend_layers/_backend.py,sha256=8lWE6LxxdNx8FnFvp2Pnk-UqJ8oymxXwx7s9HTEgDug,47443
+ returnn/tf/frontend_layers/_backend.py,sha256=6bT_4fjfV0IRcFqcZ0kcWLx0eYZGRqAJDTEfWSRIFnA,47451
  returnn/tf/frontend_layers/_utils.py,sha256=ijByaDOqPDod5mZC9EoTkt8PHBEODXHsWbkwDOF9XW4,4205
  returnn/tf/frontend_layers/cond.py,sha256=yQ2h5W0sgMZndJdrWv2EE9k9yIcspQ1U0HwBSh3hOKE,14830
  returnn/tf/frontend_layers/config_entry_points.py,sha256=t01RWOiaZohzuqPXX-MLV0P5yCOfE0dz-9dZ77_pK4c,5751
@@ -216,7 +216,7 @@ returnn/torch/data/queued_data_iter.py,sha256=PoOsGHdHVZjTmcyfq_ZOw--P6hyfTdmAWI
  returnn/torch/data/returnn_dataset_wrapper.py,sha256=1Bw82-Ge_8m_DSDXZNqQ3zGDic2HQlp6jysELL0NVK0,7369
  returnn/torch/data/tensor_utils.py,sha256=-Teqi--LLbt6q_5mDRdoHZHmPgSdC83W706ukif_YiU,1284
  returnn/torch/frontend/__init__.py,sha256=AA48HZnC17ASuKA0EWy8loZ-Bib_yUtqF4T1wYvjst4,62
- returnn/torch/frontend/_backend.py,sha256=ZHeE5A9nPo6i2KShRRNkiqpIrz4DmA0g3QhWddzFikg,101274
+ returnn/torch/frontend/_backend.py,sha256=8rCnNRoiUf_Sqmb1u2Y7Mf89Hmzd0LkrroLoXVKn6ww,101468
  returnn/torch/frontend/_rand.py,sha256=1JgIkV2XmpgJD86zXZ-NCAe-QuoP2swr6NaS1oz3Qa8,1830
  returnn/torch/frontend/bridge.py,sha256=Z2_UW8AagezC7zsXDc5PKcd8G9WwisV7j9SWGHU0m4U,7840
  returnn/torch/frontend/raw_ops.py,sha256=lF0h-KtYYsdaaqQADylVZp9qzPskOOXA4MfmYDyx5IU,296
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
  returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
  returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
  returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
- returnn-1.20250223.154045.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
- returnn-1.20250223.154045.dist-info/METADATA,sha256=osW4TLxe1IbJ9J1E7K31tPJAgyAAEoVhILW6twz1beg,5215
- returnn-1.20250223.154045.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
- returnn-1.20250223.154045.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
- returnn-1.20250223.154045.dist-info/RECORD,,
+ returnn-1.20250225.201207.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+ returnn-1.20250225.201207.dist-info/METADATA,sha256=MlICTMbISeiq6sz_1NI8XnxGZLlQzE0eH9VfGGDjlKs,5215
+ returnn-1.20250225.201207.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+ returnn-1.20250225.201207.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+ returnn-1.20250225.201207.dist-info/RECORD,,