returnn 1.20250902.114352__py3-none-any.whl → 1.20250903.215851__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of returnn might be problematic.

returnn/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: returnn
-Version: 1.20250902.114352
+Version: 1.20250903.215851
 Summary: The RWTH extensible training framework for universal recurrent neural networks
 Home-page: https://github.com/rwth-i6/returnn/
 Author: Albert Zeyer

returnn/_setup_info_generated.py CHANGED
@@ -1,2 +1,2 @@
-version = '1.20250902.114352'
-long_version = '1.20250902.114352+git.87030fa'
+version = '1.20250903.215851'
+long_version = '1.20250903.215851+git.7651133'

returnn/frontend/array_.py CHANGED
@@ -1195,7 +1195,10 @@ def reverse_sequence(tensor: Tensor, *, axis: Dim, handle_dynamic_dims: bool = T
     if not handle_dynamic_dims or not axis.need_masking():
         # noinspection PyProtectedMember
         return tensor._raw_backend.flip_no_mask(tensor, axis=axis)
-    indices = rf.combine_bc(axis.get_size_tensor(), "-", rf.range_over_dim(axis)) - 1
+    indices = (
+        rf.combine_bc(axis.get_size_tensor(device=tensor.device), "-", rf.range_over_dim(axis, device=tensor.device))
+        - 1
+    )
     return rf.gather(tensor, indices=indices, axis=axis, clip_to_valid=True)

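The change above only moves the index computation onto tensor.device (presumably so the size and range tensors end up on the same device as the data); the reversal logic itself is unchanged. As a rough illustration of what that masked reverse computes, here is a plain NumPy sketch (illustration only, not the RETURNN frontend API): position i reads from index seq_len - 1 - i, clipped into the valid range, mirroring the rf.gather(..., clip_to_valid=True) call above.

    import numpy as np

    def reverse_sequence_np(x: np.ndarray, seq_lens: np.ndarray) -> np.ndarray:
        """x: [batch, time, feat]; seq_lens: [batch]. Reverse each sequence up to its own length."""
        time = x.shape[1]
        # position i reads from index seq_len - 1 - i; padded positions turn negative
        idx = seq_lens[:, None] - 1 - np.arange(time)[None, :]
        idx = np.clip(idx, 0, time - 1)  # analogue of clip_to_valid=True
        return np.take_along_axis(x, idx[:, :, None], axis=1)

    x = np.arange(2 * 4 * 1).reshape(2, 4, 1)
    print(reverse_sequence_np(x, np.array([4, 2]))[..., 0])
    # [[3 2 1 0]    first sequence (length 4) fully reversed
    #  [5 4 4 4]]   second sequence (length 2) reverses its first two frames; the rest is clipped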

returnn/tensor/_dim_extra.py CHANGED
@@ -2184,7 +2184,7 @@ class _DimMixin:
             other = other.dimension  # makes matching easier
         if isinstance(other, int) and other == 1:
             return self
-        if self.is_constant_static_dim() and isinstance(other, _d.Dim):
+        if self.is_constant_static_dim() and isinstance(other, _d.Dim) and not other.is_constant_static_dim():
             return self.dimension * other  # use rmul
         cache_key = ("mul", other)
         cache = self.get_same_base()._make_extra().cache_dim_math
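The new not other.is_constant_static_dim() guard keeps products of two constant static dims out of the self.dimension * other shortcut, so that case falls through to the cached dim-math handling visible in the context lines instead. The shortcut hands the product to the int path (Dim.__rmul__), and together with the old rewrite in _math_find_matching_mult below it could presumably bounce back and forth; the comment added in the next hunk mentions infinite recursions. A toy sketch of that kind of delegation loop, using a made-up ToyConstDim class rather than RETURNN's actual Dim:

    class ToyConstDim:
        """Stand-in for a constant static dim; only the delegation pattern matters here."""

        def __init__(self, dimension: int):
            self.dimension = dimension

        def is_constant_static_dim(self) -> bool:
            return True

        def __mul__(self, other):
            if self.is_constant_static_dim() and isinstance(other, ToyConstDim):
                # old behaviour: delegate even when other is also a constant static dim
                return self.dimension * other  # int * ToyConstDim -> other.__rmul__
            return NotImplemented

        def __rmul__(self, other: int):
            # the int path wraps the factor and multiplies again -> back into __mul__
            return ToyConstDim(other) * self

    try:
        ToyConstDim(2) * ToyConstDim(3)
    except RecursionError:
        print("constant * constant ping-pongs between __mul__ and __rmul__")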
@@ -2571,14 +2571,19 @@ class _MathFindMatchingAdditive:


 def _math_find_matching_mult(start: Dim, other: Union[int, Dim], *, right: bool) -> Optional[Dim]:
-    if (isinstance(other, int) or other.is_constant_static_dim()) and start.is_constant_static_dim():
+    # we assume, if other is Dim, then it is not constant static dim
+    if isinstance(other, int) and start.is_constant_static_dim():
         return _math_get_dim_via_bin_op([start, other] if right else [other, start], "mul")
     c_op = start.derived_from_op
     if c_op and c_op.kind == "mul" and len(c_op.inputs) == 2:
         if right:
             return c_op.inputs[0] * (c_op.inputs[1] * other)
-        else:
-            return (other * c_op.inputs[0]) * c_op.inputs[1]
+        # Don't do right=False -> (other * c_op.inputs[0]) * c_op.inputs[1],
+        # because this can lead to infinite recursions,
+        # and also we don't have a proper normalized form for multiplication.
+        # However, if both left-most factors are constant static dims, then we can merge it.
+        elif isinstance(other, int) and c_op.inputs[0].is_constant_static_dim():
+            return (other * c_op.inputs[0].dimension) * c_op.inputs[1]
     return None

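The replacement elif only fires when other is a plain int and the left-most factor of the existing derivation is a constant static dim; it then folds the two constants, relying on other * (const * base) == (other * const) * base, and no longer multiplies two Dim objects on the left as the dropped else branch did. A self-contained toy sketch of that folding rule, with a hypothetical ToyDim class standing in for RETURNN's Dim:

    from typing import Optional, Tuple

    class ToyDim:
        """Toy stand-in: a dim with an optional (constant_factor, base_dim) derivation."""

        def __init__(self, dimension: Optional[int], derived_from: Optional[Tuple[int, "ToyDim"]] = None):
            self.dimension = dimension        # static size, or None if dynamic
            self.derived_from = derived_from  # set when the dim was built as const * base

        def __rmul__(self, other: int) -> "ToyDim":
            if self.derived_from is not None:
                const, base = self.derived_from
                # merge the two constant factors: other * (const * base) == (other * const) * base
                return (other * const) * base
            new_static = None if self.dimension is None else other * self.dimension
            return ToyDim(new_static, derived_from=(other, self))

    time_dim = ToyDim(None)   # dynamic base dim
    doubled = 2 * time_dim    # derived as (2, time_dim)
    folded = 3 * doubled      # constants fold to (6, time_dim) instead of nesting another derivation
    assert folded.derived_from == (6, time_dim)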
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: returnn
-Version: 1.20250902.114352
+Version: 1.20250903.215851
 Summary: The RWTH extensible training framework for universal recurrent neural networks
 Home-page: https://github.com/rwth-i6/returnn/
 Author: Albert Zeyer
@@ -1,9 +1,9 @@
-returnn/PKG-INFO,sha256=zCN-KDwCaMFI82phyc-dsc6Fo_thXN-UOBfvd93s0bU,5215
+returnn/PKG-INFO,sha256=S-XwueZUBumfIMPZ4Qb-kGHRIs68AQJmjdrJW7duTXw,5215
 returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
 returnn/__main__.py,sha256=lHyZcu_0yc9f7Vf_Kfdy9PmeU0T76XVXnpalHi5WKro,31740
 returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
 returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
-returnn/_setup_info_generated.py,sha256=J31pQBS08nmbv7yxX4hOOWq1d__odaj7aX-8_sTiVXo,77
+returnn/_setup_info_generated.py,sha256=qKXi9Lxgy7ppKbJEo1P_VbQHVIGgicUKX9qjJTVqo3o,77
 returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
 returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
 returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -80,7 +80,7 @@ returnn/frontend/_cache.py,sha256=Uao2xzfvVaKABk1fkxcpXzxKIGJaI9FwwlTvvoNUstk,85
 returnn/frontend/_numpy_backend.py,sha256=fZjks7p3dgxVZ6tSDazTTgBxNjJqXjfqgw_7mA7rDEE,9066
 returnn/frontend/_random_journal.py,sha256=_ktP_mjgx8vtQQGX_DofdhewJj0aPiczefTWeemPkmo,5457
 returnn/frontend/_utils.py,sha256=uVQldGHyYKIyhSEmumJ04ix5eP5tjZw4CEC0w6-zhyQ,12074
-returnn/frontend/array_.py,sha256=7uX5-Os2OyYUfC5soprIUx7rr-371yKf9DcckRKONXY,53855
+returnn/frontend/array_.py,sha256=j6rayxqV4ki5vohH-ZC7N3J8_CouNCRRRP_pE89O-rE,53921
 returnn/frontend/attention.py,sha256=GKt-Xqnz8sIyXVrE0i4VCS7J2Wu7dmoH_BA0Cu8CrXQ,45769
 returnn/frontend/backend.py,sha256=iQ9w4xl8Ea7bgpb0VUaCKq50rV5Bl2E5J8Rhd-oqD_c,883
 returnn/frontend/build_from_dict.py,sha256=rfWa2rjjhIR_kIQED_nMrygrQBunS6unegzWTLVbC98,3017
@@ -154,7 +154,7 @@ returnn/sprint/extern_interface.py,sha256=l-v1X-Yg0UpTFe7Y3c4FwWOqpSNuv9Oy5EzqlK
 returnn/sprint/interface.py,sha256=1j5SB0V8hSW8A5song9ciZtcBnZoKKfNipk9ezOIMuA,36491
 returnn/tensor/README.md,sha256=X6BqcRLrPLPnwF9yR69uqIFrMnNluj9pBkOPHwNgzuo,501
 returnn/tensor/__init__.py,sha256=on6j5PEOQpck50UcsR4nJzJSDmoVy34z1Oq4efv6Ax0,154
-returnn/tensor/_dim_extra.py,sha256=rwtDR5WRS8wqgKj4WkPaWtaKa8UJYTrS76ZhX0W5bP4,115580
+returnn/tensor/_dim_extra.py,sha256=D1lDB-zjF1tPhBQFApbui2AlyARdTx0hIFKRhTtk4T4,116033
 returnn/tensor/_tensor_extra.py,sha256=gbSl6HMtn8WFYloanew_RaNNwx3eCpnKv3UfCkntJiQ,164923
 returnn/tensor/_tensor_mixin_base.py,sha256=H5z86I0NejxrSgMH1c5oXQzBqS6L9HpvP4y7oegBaSc,643
 returnn/tensor/_tensor_op_overloads.py,sha256=HklwuTBjy7mH_665VKaCUdu-oC3aa7Uz1ZQiCz4jeZc,5448
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
 returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
 returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
 returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
-returnn-1.20250902.114352.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
-returnn-1.20250902.114352.dist-info/METADATA,sha256=zCN-KDwCaMFI82phyc-dsc6Fo_thXN-UOBfvd93s0bU,5215
-returnn-1.20250902.114352.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
-returnn-1.20250902.114352.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
-returnn-1.20250902.114352.dist-info/RECORD,,
+returnn-1.20250903.215851.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+returnn-1.20250903.215851.dist-info/METADATA,sha256=S-XwueZUBumfIMPZ4Qb-kGHRIs68AQJmjdrJW7duTXw,5215
+returnn-1.20250903.215851.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+returnn-1.20250903.215851.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+returnn-1.20250903.215851.dist-info/RECORD,,