returnn 1.20250122.134518__tar.gz → 1.20250122.223647__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of returnn might be problematic.

Files changed (473)
  1. {returnn-1.20250122.134518/returnn.egg-info → returnn-1.20250122.223647}/PKG-INFO +1 -1
  2. returnn-1.20250122.223647/_setup_info_generated.py +2 -0
  3. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_cache.py +2 -2
  4. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_native/__init__.py +2 -0
  5. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_random_journal.py +1 -1
  6. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/conv.py +1 -1
  7. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/dims.py +1 -1
  8. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/rec.py +2 -2
  9. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/run_ctx.py +6 -6
  10. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/_dim_extra.py +46 -43
  11. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/_tensor_extra.py +5 -5
  12. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/_tensor_op_overloads.py +10 -0
  13. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/dim.py +1 -1
  14. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/tensor_dict.py +1 -1
  15. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/_backend.py +2 -2
  16. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/dims.py +2 -2
  17. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/layer.py +9 -5
  18. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/loop.py +2 -2
  19. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/make_layer.py +1 -1
  20. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/prev_tensor_ref.py +1 -1
  21. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_low_level/_backend.py +1 -1
  22. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/layers/base.py +28 -19
  23. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/layers/basic.py +22 -20
  24. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/layers/rec.py +43 -41
  25. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/network.py +3 -3
  26. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/data/extern_data.py +2 -2
  27. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/basic.py +1 -1
  28. {returnn-1.20250122.134518 → returnn-1.20250122.223647/returnn.egg-info}/PKG-INFO +1 -1
  29. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/rf_utils.py +2 -2
  30. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TFUtil.py +3 -3
  31. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/compile_tf_graph.py +2 -2
  32. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/torch_export_to_onnx.py +2 -2
  33. returnn-1.20250122.134518/_setup_info_generated.py +0 -2
  34. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/.editorconfig +0 -0
  35. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/.gitignore +0 -0
  36. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/.gitmodules +0 -0
  37. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/.kateconfig +0 -0
  38. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/CHANGELOG.md +0 -0
  39. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/CODEOWNERS +0 -0
  40. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/CONTRIBUTING.md +0 -0
  41. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/LICENSE +0 -0
  42. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/MANIFEST.in +0 -0
  43. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/README.rst +0 -0
  44. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/__init__.py +0 -0
  45. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/12AX.cluster_map +0 -0
  46. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/_setup_returnn_env.py +0 -0
  47. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-fwd.config +0 -0
  48. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-horovod-mpi.py +0 -0
  49. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-horovod-mpi.py.sh +0 -0
  50. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-horovod-mpi.sh +0 -0
  51. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-hyper-param-tuning.config +0 -0
  52. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-iter-dataset.py +0 -0
  53. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-list-devices.py +0 -0
  54. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-lua-torch-layer.config +0 -0
  55. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-pretrain.config +0 -0
  56. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-record-and-push-to-webserver.py +0 -0
  57. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-returnn-as-framework.py +0 -0
  58. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-rf-pt-benchmark.py +0 -0
  59. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-rf.config +0 -0
  60. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-rhn-enwik8.config +0 -0
  61. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-sprint-interface.py +0 -0
  62. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-att-copy.config +0 -0
  63. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-attention.config +0 -0
  64. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-chunking-blstm.12ax.config +0 -0
  65. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-contribrnn-lstm.12ax.config +0 -0
  66. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-enc-dec.config +0 -0
  67. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-hard-att-copy.config +0 -0
  68. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-lstm-benchmark.py +0 -0
  69. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-maxgradnorm-lstm.12ax.config +0 -0
  70. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-native-lstm-lowmem.12ax.config +0 -0
  71. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-native-lstm.12ax.config +0 -0
  72. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-native-lstm2.12ax.config +0 -0
  73. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-native-lstm2.12ax.tuned.config +0 -0
  74. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-neural-transducer.12ax.config +0 -0
  75. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-rec-explicit-lstm.config +0 -0
  76. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-rec-explicit-rnn.config +0 -0
  77. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-rec-self-att.config +0 -0
  78. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-search-compiled-graph.py +0 -0
  79. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-tf-vanilla-lstm.12ax.config +0 -0
  80. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-timit-lstm-ctc.config +0 -0
  81. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-torch.config +0 -0
  82. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo-upd-mult-model.lstm.12ax.config +0 -0
  83. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/demo.sh +0 -0
  84. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png +0 -0
  85. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png +0 -0
  86. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png +0 -0
  87. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/README.txt +0 -0
  88. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/chars.txt +0 -0
  89. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/config_demo +0 -0
  90. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/config_fwd +0 -0
  91. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/config_real +0 -0
  92. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/create_IAM_dataset.py +0 -0
  93. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/decode.py +0 -0
  94. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/features/raw/demo.h5 +0 -0
  95. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/go.sh +0 -0
  96. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/lines.txt +0 -0
  97. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/split/eval.txt +0 -0
  98. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/split/train.txt +0 -0
  99. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/IAM/split/valid.txt +0 -0
  100. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/README.md +0 -0
  101. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/artificial/create_test_h5.py +0 -0
  102. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/artificial/forwardconfig +0 -0
  103. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/artificial/go.sh +0 -0
  104. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/artificial/trainconfig +0 -0
  105. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/artificial_rgb/create_test_h5.py +0 -0
  106. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/artificial_rgb/forwardconfig +0 -0
  107. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/artificial_rgb/go.sh +0 -0
  108. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/demos/mdlstm/artificial_rgb/trainconfig +0 -0
  109. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/pyproject.toml +0 -0
  110. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/requirements.txt +0 -0
  111. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/__init__.py +0 -0
  112. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/__main__.py +0 -0
  113. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/__old_mod_loader__.py +0 -0
  114. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/__setup__.py +0 -0
  115. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/config.py +0 -0
  116. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/__init__.py +0 -0
  117. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/audio.py +0 -0
  118. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/basic.py +0 -0
  119. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/bundle_file.py +0 -0
  120. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/cached.py +0 -0
  121. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/cached2.py +0 -0
  122. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/distrib_files.py +0 -0
  123. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/generating.py +0 -0
  124. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/hdf.py +0 -0
  125. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/lm.py +0 -0
  126. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/map.py +0 -0
  127. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/meta.py +0 -0
  128. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/multi_proc.py +0 -0
  129. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/normalization_data.py +0 -0
  130. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/numpy_dump.py +0 -0
  131. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/postprocessing.py +0 -0
  132. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/raw_wav.py +0 -0
  133. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/sprint.py +0 -0
  134. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/stereo.py +0 -0
  135. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/text_dict.py +0 -0
  136. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/util/__init__.py +0 -0
  137. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/util/feature_extraction.py +0 -0
  138. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/util/strings.py +0 -0
  139. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/datasets/util/vocabulary.py +0 -0
  140. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/engine/__init__.py +0 -0
  141. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/engine/base.py +0 -0
  142. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/engine/batch.py +0 -0
  143. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/__init__.py +0 -0
  144. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/__main__.py +0 -0
  145. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/.git +0 -0
  146. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/.gitignore +0 -0
  147. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/LICENSE +0 -0
  148. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/README.md +0 -0
  149. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/aligner.gif +0 -0
  150. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/check.png +0 -0
  151. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/core.cu +0 -0
  152. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/core.h +0 -0
  153. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/core_cpu.cpp +0 -0
  154. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/LICENSE +0 -0
  155. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/MANIFEST.in +0 -0
  156. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/README.md +0 -0
  157. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/binding.cpp +0 -0
  158. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.cu +0 -0
  159. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.h +0 -0
  160. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/requirements.txt +0 -0
  161. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/setup.py +0 -0
  162. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/__init__.py +0 -0
  163. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/test.py +0 -0
  164. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/ref_rna.py +0 -0
  165. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/setup.py +0 -0
  166. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op.cc +0 -0
  167. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op_kernel_tmpl.h +0 -0
  168. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/warp_rna/__init__.py +0 -0
  169. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/WarpRna/warp-rna/test.cpp +0 -0
  170. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/__init__.py +0 -0
  171. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/graph_editor/README.md +0 -0
  172. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/graph_editor/__init__.py +0 -0
  173. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/graph_editor/edit.py +0 -0
  174. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/graph_editor/reroute.py +0 -0
  175. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/graph_editor/select.py +0 -0
  176. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/graph_editor/subgraph.py +0 -0
  177. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/graph_editor/transform.py +0 -0
  178. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/extern/graph_editor/util.py +0 -0
  179. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/forward_iface.py +0 -0
  180. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/__init__.py +0 -0
  181. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_backend.py +0 -0
  182. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_native/backend.cpp +0 -0
  183. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_native/backend.hpp +0 -0
  184. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_native/module.cpp +0 -0
  185. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_native/module.hpp +0 -0
  186. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_native/py_utils.hpp +0 -0
  187. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_native/tensor_ops.cpp +0 -0
  188. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_native/tensor_ops.hpp +0 -0
  189. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_numpy_backend.py +0 -0
  190. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/_utils.py +0 -0
  191. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/array_.py +0 -0
  192. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/attention.py +0 -0
  193. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/audio/__init__.py +0 -0
  194. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/audio/mel.py +0 -0
  195. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/audio/specaugment.py +0 -0
  196. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/backend.py +0 -0
  197. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/build_from_dict.py +0 -0
  198. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/cond.py +0 -0
  199. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/const.py +0 -0
  200. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/container.py +0 -0
  201. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/control_flow_ctx.py +0 -0
  202. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/conversions/__init__.py +0 -0
  203. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/conversions/espnet_e_branchformer.py +0 -0
  204. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/conversions/hf_llama.py +0 -0
  205. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/conversions/torch_nn.py +0 -0
  206. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/decoder/__init__.py +0 -0
  207. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/decoder/transformer.py +0 -0
  208. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/device.py +0 -0
  209. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/dropout.py +0 -0
  210. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/dtype.py +0 -0
  211. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/encoder/__init__.py +0 -0
  212. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/encoder/base.py +0 -0
  213. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/encoder/conformer.py +0 -0
  214. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/encoder/conformer_v2.py +0 -0
  215. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/encoder/e_branchformer.py +0 -0
  216. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/encoder/transformer.py +0 -0
  217. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/gradient.py +0 -0
  218. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/graph.py +0 -0
  219. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/hooks.py +0 -0
  220. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/init.py +0 -0
  221. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/label_smoothing.py +0 -0
  222. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/linear.py +0 -0
  223. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/loop.py +0 -0
  224. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/loss.py +0 -0
  225. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/math_.py +0 -0
  226. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/matmul.py +0 -0
  227. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/module.py +0 -0
  228. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/normalization.py +0 -0
  229. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/parameter.py +0 -0
  230. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/parametrizations.py +0 -0
  231. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/parametrize.py +0 -0
  232. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/piecewise_linear.py +0 -0
  233. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/rand.py +0 -0
  234. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/reduce.py +0 -0
  235. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/signal.py +0 -0
  236. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/state.py +0 -0
  237. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/stepwise_scheduler.py +0 -0
  238. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/tensor_array.py +0 -0
  239. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/frontend/types.py +0 -0
  240. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/import_/__init__.py +0 -0
  241. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/import_/common.py +0 -0
  242. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/import_/git.py +0 -0
  243. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/import_/import_.py +0 -0
  244. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/learning_rate_control.py +0 -0
  245. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/log.py +0 -0
  246. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/native_op.cpp +0 -0
  247. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/native_op.py +0 -0
  248. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/pretrain.py +0 -0
  249. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/sprint/__init__.py +0 -0
  250. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/sprint/cache.py +0 -0
  251. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/sprint/control.py +0 -0
  252. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/sprint/error_signals.py +0 -0
  253. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/sprint/extern_interface.py +0 -0
  254. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/sprint/interface.py +0 -0
  255. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/README.md +0 -0
  256. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/__init__.py +0 -0
  257. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/_tensor_mixin_base.py +0 -0
  258. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/control_flow_ctx.py +0 -0
  259. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/marked_dim.py +0 -0
  260. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/tensor.py +0 -0
  261. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tensor/utils.py +0 -0
  262. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/__init__.py +0 -0
  263. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/compat.py +0 -0
  264. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/data_pipeline.py +0 -0
  265. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/distributed.py +0 -0
  266. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/engine.py +0 -0
  267. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/README.md +0 -0
  268. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/__init__.py +0 -0
  269. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/_utils.py +0 -0
  270. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/cond.py +0 -0
  271. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/config_entry_points.py +0 -0
  272. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/debug_eager_mode.py +0 -0
  273. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/masked_computation.py +0 -0
  274. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_layers/parameter_assign.py +0 -0
  275. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/frontend_low_level/__init__.py +0 -0
  276. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/horovod.py +0 -0
  277. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/hyper_param_tuning.py +0 -0
  278. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/layers/__init__.py +0 -0
  279. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/layers/segmental_model.py +0 -0
  280. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/layers/signal_processing.py +0 -0
  281. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/layers/variable.py +0 -0
  282. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/native_op.py +0 -0
  283. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/sprint.py +0 -0
  284. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/updater.py +0 -0
  285. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/util/__init__.py +0 -0
  286. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/util/basic.py +0 -0
  287. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/util/data.py +0 -0
  288. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/util/gradient_checkpoint.py +0 -0
  289. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/util/ken_lm.py +0 -0
  290. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/tf/util/open_fst.py +0 -0
  291. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/README.md +0 -0
  292. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/__init__.py +0 -0
  293. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/data/__init__.py +0 -0
  294. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/data/pipeline.py +0 -0
  295. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/data/queued_data_iter.py +0 -0
  296. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/data/returnn_dataset_wrapper.py +0 -0
  297. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/data/tensor_utils.py +0 -0
  298. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/distributed.py +0 -0
  299. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/engine.py +0 -0
  300. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/frontend/__init__.py +0 -0
  301. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/frontend/_backend.py +0 -0
  302. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/frontend/_rand.py +0 -0
  303. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/frontend/bridge.py +0 -0
  304. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/frontend/raw_ops.py +0 -0
  305. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/optim/README.md +0 -0
  306. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/optim/__init__.py +0 -0
  307. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/optim/lion.py +0 -0
  308. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/updater.py +0 -0
  309. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/README.md +0 -0
  310. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/__init__.py +0 -0
  311. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/array_.py +0 -0
  312. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/debug_inf_nan.py +0 -0
  313. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/diagnose_gpu.py +0 -0
  314. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/exception_helper.py +0 -0
  315. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/gradient_checkpoint.py +0 -0
  316. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/module.py +0 -0
  317. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/torch/util/scaled_gradient.py +0 -0
  318. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/__init__.py +0 -0
  319. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/better_exchook.py +0 -0
  320. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/bpe.py +0 -0
  321. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/debug.py +0 -0
  322. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/debug_helpers.py +0 -0
  323. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/file_cache.py +0 -0
  324. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/fsa.py +0 -0
  325. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/literal_py_to_pickle.py +0 -0
  326. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/lru_cache.py +0 -0
  327. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/math.py +0 -0
  328. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/multi_proc_non_daemonic_spawn.py +0 -0
  329. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/native_code_compiler.py +0 -0
  330. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/pprint.py +0 -0
  331. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/py-to-pickle.cpp +0 -0
  332. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/py_ext_mod_compiler.py +0 -0
  333. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/result_with_reason.py +0 -0
  334. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/sig_proc.py +0 -0
  335. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/task_system.py +0 -0
  336. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/train_proc_manager.py +0 -0
  337. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn/util/watch_memory.py +0 -0
  338. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn.egg-info/SOURCES.txt +0 -0
  339. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn.egg-info/dependency_links.txt +0 -0
  340. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/returnn.egg-info/top_level.txt +0 -0
  341. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/rnn.py +0 -0
  342. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/setup.cfg +0 -0
  343. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/setup.py +0 -0
  344. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/DummySprintExec.py +0 -0
  345. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm-inspection-profile.xml +0 -0
  346. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/.gitignore +0 -0
  347. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/.name +0 -0
  348. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/codeStyleSettings.xml +0 -0
  349. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/codeStyles/Project.xml +0 -0
  350. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/codeStyles/codeStyleConfig.xml +0 -0
  351. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/inspectionProfiles/Project_Default.xml +0 -0
  352. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/inspectionProfiles/profiles_settings.xml +0 -0
  353. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/misc.xml +0 -0
  354. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/modules.xml +0 -0
  355. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/returnn.iml +0 -0
  356. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/PyCharm.idea/scopes/scope_settings.xml +0 -0
  357. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/_set_num_threads1.py +0 -0
  358. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/_setup_returnn_env.py +0 -0
  359. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/_setup_test_env.py +0 -0
  360. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/bpe-unicode-demo.codes +0 -0
  361. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/bpe-unicode-demo.vocab +0 -0
  362. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/lexicon_opt.fst +0 -0
  363. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/lexicon_opt.isyms +0 -0
  364. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/lexicon_opt.jpg +0 -0
  365. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/lexicon_opt.osyms +0 -0
  366. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/lint_common.py +0 -0
  367. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/pycharm-inspect.py +0 -0
  368. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/pylint.py +0 -0
  369. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/returnn-as-framework.py +0 -0
  370. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/spelling.dic +0 -0
  371. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_Config.py +0 -0
  372. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_Dataset.py +0 -0
  373. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_Fsa.py +0 -0
  374. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_GeneratingDataset.py +0 -0
  375. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_HDFDataset.py +0 -0
  376. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_LearningRateControl.py +0 -0
  377. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_Log.py +0 -0
  378. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_MultiProcDataset.py +0 -0
  379. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_Pretrain.py +0 -0
  380. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_ResNet.py +0 -0
  381. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_SprintDataset.py +0 -0
  382. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_SprintInterface.py +0 -0
  383. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TFEngine.py +0 -0
  384. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TFNativeOp.py +0 -0
  385. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TFNetworkLayer.py +0 -0
  386. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TFNetworkRecLayer.py +0 -0
  387. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TFNetworkSigProcLayer.py +0 -0
  388. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TFUpdater.py +0 -0
  389. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TF_determinism.py +0 -0
  390. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TaskSystem.py +0 -0
  391. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TaskSystem_SharedMem.py +0 -0
  392. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_TranslationDataset.py +0 -0
  393. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_Util.py +0 -0
  394. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_demos.py +0 -0
  395. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_fork_exec.py +0 -0
  396. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_hdf_dump.py +0 -0
  397. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_array.py +0 -0
  398. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_attention.py +0 -0
  399. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_base.py +0 -0
  400. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_cond.py +0 -0
  401. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_const.py +0 -0
  402. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_container.py +0 -0
  403. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_conv.py +0 -0
  404. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_decoder_transformer.py +0 -0
  405. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_encoder_conformer.py +0 -0
  406. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_gradient.py +0 -0
  407. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_label_smoothing.py +0 -0
  408. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_loop.py +0 -0
  409. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_math.py +0 -0
  410. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_normalization.py +0 -0
  411. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_piecewise_linear.py +0 -0
  412. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_rec.py +0 -0
  413. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_reduce.py +0 -0
  414. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_rf_signal.py +0 -0
  415. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_tensor.py +0 -0
  416. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_threading.py +0 -0
  417. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_tools.py +0 -0
  418. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_torch_dataset.py +0 -0
  419. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_torch_engine.py +0 -0
  420. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_torch_frontend.py +0 -0
  421. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_torch_internal_frontend.py +0 -0
  422. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/test_torch_util.py +0 -0
  423. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tests/torch_utils.py +0 -0
  424. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/_setup_returnn_env.py +0 -0
  425. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/analyze-dataset-batches.py +0 -0
  426. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/bliss-collect-seq-lens.py +0 -0
  427. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/bliss-dump-text.py +0 -0
  428. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/bliss-get-segment-names.py +0 -0
  429. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/bliss-to-ogg-zip.py +0 -0
  430. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/bpe-create-lexicon.py +0 -0
  431. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/calculate-word-error-rate.py +0 -0
  432. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/cleanup-old-models.py +0 -0
  433. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/collect-orth-symbols.py +0 -0
  434. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/collect-words.py +0 -0
  435. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/compile_native_op.py +0 -0
  436. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/debug-dump-search-scores.py +0 -0
  437. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/debug-plot-search-scores.py +0 -0
  438. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/dump-dataset-raw-strings.py +0 -0
  439. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/dump-dataset.py +0 -0
  440. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/dump-forward-stats.py +0 -0
  441. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/dump-forward.py +0 -0
  442. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/dump-network-json.py +0 -0
  443. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/dump-pickle.py +0 -0
  444. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/extract_state_tying_from_dataset.py +0 -0
  445. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/get-attention-weights.py +0 -0
  446. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/get-best-model-epoch.py +0 -0
  447. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/hdf_dump.py +0 -0
  448. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/hdf_dump_translation_dataset.py +0 -0
  449. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/import-blocks-mt-model.py +0 -0
  450. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/import-t2t-mt-model.py +0 -0
  451. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/.gitignore +0 -0
  452. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/Makefile +0 -0
  453. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/README.md +0 -0
  454. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/example/README.md +0 -0
  455. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/example/libs_list +0 -0
  456. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.config +0 -0
  457. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.keep_over_epoch.lstm2.config +0 -0
  458. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/example/rescore_lattice.sh +0 -0
  459. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/example/state_vars_list +0 -0
  460. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/example/tensor_names_list +0 -0
  461. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/file.h +0 -0
  462. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/htklatticerescorer.cc +0 -0
  463. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/htklatticerescorer.h +0 -0
  464. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/main.cc +0 -0
  465. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/rescorer.h +0 -0
  466. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/vocabulary.cc +0 -0
  467. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/lattice_rescorer/vocabulary.h +0 -0
  468. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/tf_avg_checkpoints.py +0 -0
  469. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/tf_inspect_checkpoint.py +0 -0
  470. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/tf_inspect_summary_log.py +0 -0
  471. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/torch_avg_checkpoints.py +0 -0
  472. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/torch_inspect_checkpoint.py +0 -0
  473. {returnn-1.20250122.134518 → returnn-1.20250122.223647}/tools/torch_inspect_checkpoint_and_opt.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: returnn
- Version: 1.20250122.134518
+ Version: 1.20250122.223647
  Summary: The RWTH extensible training framework for universal recurrent neural networks
  Home-page: https://github.com/rwth-i6/returnn/
  Author: Albert Zeyer
@@ -0,0 +1,2 @@
+ version = '1.20250122.223647'
+ long_version = '1.20250122.223647+git.6dd961c'
@@ -176,7 +176,7 @@ class DimWrapper:
  # We need some ref to the dyn size, and finalize this key when it goes out of scope.
  # This is only needed when there is no info on the static size (or eager scalar dyn size).
  ref(dim.dyn_size_ext.raw_tensor, finalize_callback)
- if self.dim_value is None and dim.dyn_size_ext and dim.dyn_size_ext.raw_tensor is not None
+ if self.dim_value is None and dim.dyn_size_ext is not None and dim.dyn_size_ext.raw_tensor is not None
  else None
  )
  self._hash = hash(dim) if self.dim_value is None else hash(self.dim_value)
@@ -195,7 +195,7 @@ class DimWrapper:
  def _dim_value_for_key(dim: Dim) -> Optional[int]:
  if dim.size is not None:
  return dim.size
- if dim.dyn_size_ext and not dim.dyn_size_ext.dims:
+ if dim.dyn_size_ext is not None and not dim.dyn_size_ext.dims:
  if dim.dyn_size_ext.raw_tensor is not None:
  # noinspection PyProtectedMember
  if dim.dyn_size_ext._raw_backend.executing_eagerly():
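Note: the two hunks above follow a pattern that recurs throughout this release: truthiness checks on optional Tensor/Dim-valued attributes (e.g. `if dim.dyn_size_ext:`) become explicit `is not None` comparisons. A minimal sketch of why that matters, using a hypothetical stand-in class rather than RETURNN's actual Tensor:

    class FakeTensor:
        """Hypothetical stand-in, not RETURNN's Tensor."""

        def __bool__(self):
            # If a tensor type refuses implicit truthiness (a common design
            # choice), the old-style check does not just misbehave, it raises.
            raise TypeError("truth value of a Tensor is ambiguous; use 'is not None'")

    dyn_size_ext = FakeTensor()

    try:
        if dyn_size_ext:  # old pattern
            pass
    except TypeError as exc:
        print("old check failed:", exc)

    if dyn_size_ext is not None:  # new pattern: asks only "is it set?"
        print("new check: dyn_size_ext is present")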
@@ -112,6 +112,8 @@ def setup():
  for name, cur_func in _TensorOpOverloadsMixin.__dict__.items(): # just all of them
  if not callable(cur_func):
  continue
+ if name in {"__bool__"}: # some exceptions
+ continue
  assert name.startswith("__") and name.endswith("__")
  native_func = getattr(mod, "_tensor_" + name[2:-2] + "_instancemethod")
  assert callable(native_func)
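Note: the hunk above swaps each dunder method of the mixin for a native implementation, but now exempts `__bool__` from that patching. A rough sketch of the pattern (all names here are illustrative, not RETURNN's internals):

    class _OpsMixin:
        """Illustrative mixin whose operators get replaced by native ones."""

        def __add__(self, other):
            return "py-add"

        def __bool__(self):
            raise TypeError("ambiguous")  # must keep its Python-level behavior

    class _NativeOps:
        @staticmethod
        def add(self, other):
            return "native-add"

    SKIP = {"__bool__"}  # "some exceptions", as in the diff

    for name, cur_func in list(_OpsMixin.__dict__.items()):
        if not callable(cur_func) or name in SKIP:
            continue
        assert name.startswith("__") and name.endswith("__")
        native_func = getattr(_NativeOps, name[2:-2], None)  # "__add__" -> "add"
        if native_func is not None:
            setattr(_OpsMixin, name, native_func)

    print(_OpsMixin() + _OpsMixin())  # prints: native-add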
@@ -58,7 +58,7 @@ class RandomJournal:
  """read next"""
  assert self._cur_entry_idx < len(self._entries)
  entry = self._entries[self._cur_entry_idx]
- if new_out_template:
+ if new_out_template is not None:
  assert new_out_template.dtype == entry.out.dtype, (
  f"random journal entry dtype mismatch,"
  f" expected {new_out_template}, got {entry.out} at index {self._cur_entry_idx}"
@@ -649,7 +649,7 @@ def make_conv_out_spatial_dims(
  assert isinstance(out_spatial_dim, Dim)
  if description_prefix and out_spatial_dim != in_spatial_dim:
  out_spatial_dim.name = f"{description_prefix}:spatial{i}"
- if in_spatial_dim.dyn_size_ext and out_spatial_dim.dyn_size_ext is None:
+ if in_spatial_dim.dyn_size_ext is not None and out_spatial_dim.dyn_size_ext is None:
  out_spatial_dim.dyn_size_ext = _calc_out_dim(
  in_dim=in_spatial_dim.dyn_size_ext,
  filter_size=filter_size[i],
@@ -166,7 +166,7 @@ def num_elements_of_shape(
  for j, dim_ in enumerate(dims):
  if i == j:
  continue
- if dim_.dyn_size_ext and dim in dim_.dyn_size_ext.dims:
+ if dim_.dyn_size_ext is not None and dim in dim_.dyn_size_ext.dims:
  related_dims.append(dim_)
  if not related_dims:
  if dim.is_static():
@@ -52,7 +52,7 @@ class LSTM(rf.Module):
  :return: output of shape {...,out_dim} if spatial_dim is single_step_dim else {...,spatial_dim,out_dim},
  and new state of the LSTM.
  """
- if not state.h or not state.c:
+ if state.h is None or state.c is None:
  raise ValueError(f"{self}: state {state} needs attributes ``h`` (hidden) and ``c`` (cell).")
  if self.in_dim not in source.dims:
  raise ValueError(f"{self}: input {source} does not have in_dim {self.in_dim}")
@@ -188,7 +188,7 @@ class ZoneoutLSTM(LSTM):
  :return: output of shape {...,out_dim} if spatial_dim is single_step_dim else {...,spatial_dim,out_dim},
  and new state of the LSTM.
  """
- if not state.h or not state.c:
+ if state.h is None or state.c is None:
  raise ValueError(f"{self}: state {state} needs attributes ``h`` (hidden) and ``c`` (cell).")
  if self.in_dim not in source.dims:
  raise ValueError(f"{self}: input {source} does not have in_dim {self.in_dim}")
@@ -272,9 +272,9 @@ class RunCtx:
  assert dims is None or (
  isinstance(dims, (list, tuple)) and all(isinstance(dim, Dim) for dim in dims)
  ), f"dims should be a tuple of Dims, got {dims}"
- if dims is None and expected_output:
+ if dims is None and expected_output is not None:
  dims = expected_output.dims
- if dims is not None and expected_output:
+ if dims is not None and expected_output is not None:
  assert expected_output.dims == tuple(
  dims
  ), f"mark_as_output: {name!r} dims mismatch from expected output, given {dims}, expected {expected_output}"
@@ -304,7 +304,7 @@ class RunCtx:
  assert name not in self.outputs.data
  self.outputs.data[name] = tensor

- if expected_output:
+ if expected_output is not None:
  # Perform sanity checks using the expected output.
  # The expected output usually comes from `model_outputs` from the user config.
  # The dimensions of `expected_output` and `tensor` should match,
@@ -429,7 +429,7 @@ class Loss:
  """
  if self._mean_loss_cached is not None:
  return self._mean_loss_cached
- if self.custom_inv_norm_factor:
+ if self.custom_inv_norm_factor is not None:
  loss = self.get_summed_loss()
  inv_norm = rf.reduce_sum(self.custom_inv_norm_factor, axis=self.custom_inv_norm_factor.dims)
  inv_norm = rf.cast(inv_norm, loss.dtype)
@@ -446,7 +446,7 @@ class Loss:
  """
  :return: inverse norm factor (scalar)
  """
- if self.custom_inv_norm_factor:
+ if self.custom_inv_norm_factor is not None:
  if self.custom_inv_norm_factor.dims:
  return rf.reduce_sum(self.custom_inv_norm_factor, axis=self.custom_inv_norm_factor.dims)
  return self.custom_inv_norm_factor
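Note: in the two Loss hunks, `custom_inv_norm_factor` is an optional Tensor, so the same explicit `is not None` applies; the surrounding code sums it over its dims and uses it to normalize the summed loss. A worked numeric sketch of that normalization, with plain Python standing in for the rf.* calls (the final division is inferred from the method names, it is not visible in the hunk):

    summed_loss = 12.0
    custom_inv_norm_factor = [3, 4, 5]  # e.g. per-sequence target lengths

    inv_norm = float(sum(custom_inv_norm_factor))  # like rf.reduce_sum over its dims
    mean_loss = summed_loss / inv_norm             # 12.0 / 12 = 1.0
    print(mean_loss)  # prints: 1.0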
@@ -500,7 +500,7 @@ def _output_tensor_from_raw(raw_tensor, *, dims: Optional[Sequence[Dim]], name:
  assert isinstance(raw_tensor, _backend.global_backend.RawTensorType)
  tensor = rf.convert_to_tensor(raw_tensor, dims=dims)
  for axis, dim in enumerate(tensor.dims):
- if dim.dyn_size_ext and dim.dyn_size_ext.raw_tensor is None:
+ if dim.dyn_size_ext is not None and dim.dyn_size_ext.raw_tensor is None:
  # Only non-scalar dyn sizes matter.
  if dim.dyn_size_ext.dims:
  raise Exception(
@@ -94,7 +94,7 @@ class _DimExtra:
  if derived_from_op and not derived_from_op.output:
  derived_from_op.output = dim
  self.match_priority = match_priority
- if src_data:
+ if src_data is not None:
  assert isinstance(src_data, _t.Tensor) and isinstance(src_axis, int)
  if not batch and dim.dyn_size_ext is not None:
  batch = dim.dyn_size_ext.batch
@@ -421,12 +421,12 @@ class _DimMixin:

  :param func: operates inplace
  """
- dyn_size_ext = self.dyn_size_ext.copy() if self.dyn_size_ext else None
- dyn_size_ext_max = self._dyn_size_max_value if self._dyn_size_max_value else None
+ dyn_size_ext = self.dyn_size_ext.copy() if self.dyn_size_ext is not None else None
+ dyn_size_ext_max = self._dyn_size_max_value if self._dyn_size_max_value is not None else None
  self.reset_raw(only_self=True)
- if dyn_size_ext:
+ if dyn_size_ext is not None:
  func(dyn_size_ext)
- if dyn_size_ext_max:
+ if dyn_size_ext_max is not None:
  func(dyn_size_ext_max)
  self.dyn_size_ext = dyn_size_ext
  self._dyn_size_max_value = dyn_size_ext_max
@@ -458,8 +458,8 @@ class _DimMixin:
  """
  :rtype: bool
  """
- if (self.dyn_size_ext and not self.dyn_size_ext.is_valid_in_current_graph()) or (
- self._dyn_size_max_value and not self._dyn_size_max_value.is_valid_in_current_graph()
+ if (self.dyn_size_ext is not None and not self.dyn_size_ext.is_valid_in_current_graph()) or (
+ self._dyn_size_max_value is not None and not self._dyn_size_max_value.is_valid_in_current_graph()
  ): # maybe from an earlier run which reuses the dim tag
  # Reset and cleanup.
  self.reset_batch_ctx()
@@ -474,7 +474,7 @@ class _DimMixin:
  if not self._extra:
  return
  if not self.batch:
- if self.dyn_size_ext and self.dyn_size_ext.batch:
+ if self.dyn_size_ext is not None and self.dyn_size_ext.batch:
  self.batch = self.dyn_size_ext.batch
  else:
  return
@@ -482,21 +482,21 @@ class _DimMixin:
  if not extra:
  return
  key = (self.batch, self.control_flow_ctx)
- if self.dyn_size_ext and key not in extra.same_for_batch_ctx:
+ if self.dyn_size_ext is not None and key not in extra.same_for_batch_ctx:
  extra.same_for_batch_ctx[key] = self
  # Check if we can find more
  if key in extra.same_for_batch_ctx:
  same = extra.same_for_batch_ctx[key]
  if same is not self:
- if same.dyn_size_ext and self.dyn_size_ext is None:
+ if same.dyn_size_ext is not None and self.dyn_size_ext is None:
  self.dyn_size_ext = same.dyn_size_ext
- if same.dyn_size_ext and same.dyn_size_ext.placeholder is not None:
+ if same.dyn_size_ext is not None and same.dyn_size_ext.placeholder is not None:
  if self.dyn_size_ext.placeholder is None:
  self.dyn_size_ext = same.dyn_size_ext
- if self.dyn_size_ext and same.dyn_size_ext is None:
+ if self.dyn_size_ext is not None and same.dyn_size_ext is None:
  same.dyn_size_ext = self.dyn_size_ext
- if self.dyn_size_ext and self.dyn_size_ext.placeholder is not None:
- if not same.dyn_size_ext or same.dyn_size_ext.placeholder is None:
+ if self.dyn_size_ext is not None and self.dyn_size_ext.placeholder is not None:
+ if same.dyn_size_ext is None or same.dyn_size_ext.placeholder is None:
  same.dyn_size_ext = self.dyn_size_ext
  # noinspection PyProtectedMember
  if self._dyn_size_max_value is None and same._dyn_size_max_value is not None:
@@ -578,7 +578,7 @@ class _DimMixin:
         else:
             raise NotImplementedError("not yet implemented: multiple derived ctxs: %r" % (derived_ctxs,))
         if dim_tag:
-            assert not dim_tag.dyn_size_ext
+            assert dim_tag.dyn_size_ext is None
             dyn_size_ext = None
         # Maybe we have sth with the base batch without beam or padded batch which we can extend.
         if batch != batch.get_global_base():
@@ -614,7 +614,7 @@ class _DimMixin:
                 dyn_size_ext.placeholder, "_RETURNN_beam_expanded_base_data", None
             )
             if batch.beam:
-                assert beam_expanded_base_data
+                assert beam_expanded_base_data is not None
                 # Note: The beam expansion used tiling, which can be cached.
                 # This means that we could end up with the same size tensor (placeholder)
                 # for multiple different beams,
@@ -634,17 +634,17 @@ class _DimMixin:
             if batch.beam:
                 dyn_size_ext.placeholder._RETURNN_dyn_size_beam = batch.beam
                 dyn_size_ext.placeholder._RETURNN_beam_expanded_base_data = beam_expanded_base_data
-        if not dyn_size_ext:
+        if dyn_size_ext is None:
             # Maybe we can infer dyn_size_ext, even with different batch.
             # Keep logic in sync with is_dim_known_in_batch_ctx.
             candidates = [self, same_base] + list(same_base_extra.same_for_batch_ctx.values())
             for other in candidates:
-                if other.dyn_size_ext and ControlFlowContext.is_parent_or_same(other.control_flow_ctx, ctx):
+                if other.dyn_size_ext is not None and ControlFlowContext.is_parent_or_same(other.control_flow_ctx, ctx):
                     dyn_size_ext = other.dyn_size_ext.copy_template()
                     dyn_size_ext.beam = batch.beam
                     dyn_size_ext.batch = batch
                     break
-        if dyn_size_ext:
+        if dyn_size_ext is not None:
             ctx = dyn_size_ext.control_flow_ctx
         elif dim_tag:
             ctx = dim_tag.control_flow_ctx
@@ -658,7 +658,7 @@ class _DimMixin:
                 # or it is defined for the same batch and ctx.
                 # In any case, reuse it then.
                 candidate.batch = batch
-                if dyn_size_ext:
+                if dyn_size_ext is not None:
                     if candidate.dyn_size_ext is not None:
                         candidate.dyn_size_ext.batch = batch
                         assert candidate.dyn_size_ext.dim_tags == dyn_size_ext.dim_tags
@@ -684,11 +684,11 @@ class _DimMixin:
             dyn_size_ext=dyn_size_ext,
         )
         dim_tag.same_as = same_base
-        if dyn_size_ext and dyn_size_ext.placeholder is not None:
+        if dyn_size_ext is not None and dyn_size_ext.placeholder is not None:
             if _d.Dim.get_tag_from_size_tensor(dyn_size_ext.placeholder) is None:
                 dim_tag.set_tag_on_size_tensor(dyn_size_ext.placeholder, batch=batch)
         same_base_extra.same_for_batch_ctx[(batch, ctx)] = dim_tag
-        if dyn_size_ext:
+        if dyn_size_ext is not None:
             if dim_tag.dyn_size_ext is None:
                 dim_tag.dyn_size_ext = dyn_size_ext
             else:
@@ -709,7 +709,7 @@ class _DimMixin:
             self._extra.cache_dyn_size_ext_dev.clear()
         self.batch = None
         self.control_flow_ctx = None
-        if self.dyn_size_ext and self.dyn_size_ext.batch:
+        if self.dyn_size_ext is not None and self.dyn_size_ext.batch:
             self.dyn_size_ext = self.dyn_size_ext.copy_template()
             self.dyn_size_ext.batch = None
             self.dyn_size_ext.control_flow_ctx = None
@@ -771,7 +771,7 @@ class _DimMixin:
 
         :param tf.Tensor dyn_size:
         """
-        if self.dyn_size_ext and self.dyn_size_ext.placeholder is dyn_size:  # fast path check
+        if self.dyn_size_ext is not None and self.dyn_size_ext.placeholder is dyn_size:  # fast path check
             return
         assert self.can_be_used_as_dim()
         other = _d.Dim.get_tag_from_size_tensor(dyn_size)
@@ -787,7 +787,7 @@ class _DimMixin:
             return
         self._init_default_dyn_size_ext(dyn_size)
         self.set_tag_on_size_tensor(dyn_size)
-        assert self.dyn_size_ext.placeholder is dyn_size
+        assert self.dyn_size_ext is not None and self.dyn_size_ext.placeholder is dyn_size
 
     def _init_default_dyn_size_ext(self, dyn_size):
         """
@@ -814,7 +814,7 @@ class _DimMixin:
         """
         :return: dyn_size_ext on the device
         """
-        assert self.dyn_size_ext
+        assert self.dyn_size_ext is not None
         if not device or device == "cpu":
             return self.dyn_size_ext
 
@@ -837,7 +837,7 @@ class _DimMixin:
         """
         import returnn.frontend as rf
 
-        assert self.dyn_size_ext and self.dyn_size_ext.raw_tensor is not None
+        assert self.dyn_size_ext is not None and self.dyn_size_ext.raw_tensor is not None
         # noinspection PyProtectedMember
         backend = self.dyn_size_ext._raw_backend
 
@@ -914,7 +914,7 @@ class _DimMixin:
         """
         if self.is_batch_dim():
             return True
-        if not self.dyn_size_ext and self.dimension is not None:
+        if self.dyn_size_ext is None and self.dimension is not None:
             return True
         if self.dyn_size_ext is not None:
             return True
@@ -937,7 +937,7 @@ class _DimMixin:
             return True
         dim = self.get_for_batch_ctx(batch=batch, ctx=ctx, allow_none=True)
         if dim:
-            return bool(dim.dyn_size_ext)
+            return dim.dyn_size_ext is not None
         candidates = [self, self.get_same_base()]
         if self._extra:
             candidates += list(self._extra.same_for_batch_ctx.values())
@@ -953,7 +953,8 @@ class _DimMixin:
         :return: whether the dim is not static. usually means that it has seq lengths
         """
         return self.dimension is None and (
-            (self.dyn_size_ext and self.dyn_size_ext.dims) or (not self.dyn_size_ext and not self.is_batch_dim())
+            (self.dyn_size_ext is not None and self.dyn_size_ext.dims)
+            or (self.dyn_size_ext is None and not self.is_batch_dim())
         )
 
     def is_dynamic(self) -> bool:
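Splitting the returned condition here is not only line-length formatting: Python's `and`/`or` evaluate the truth value of their left operand (which for a Tensor now raises), and they return one of the operands rather than a strict bool. A quick plain-Python illustration:

    dims = ("batch", "time")

    print(dims and len(dims))   # 2: `and` returns an operand, not a bool
    print(() and True)          # (): a falsy operand is passed through unchanged
    print(dims is not None and len(dims) > 0)  # True: a genuine bool

With a Tensor on the left of `and`, the old one-line form would now raise before the `or` branch could even be considered; the rewritten form only ever tests real bools.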
@@ -997,7 +998,7 @@ class _DimMixin:
         :return: whether this dim tag for this specific batch (incl beam) is the same as the given size
         :rtype: bool
         """
-        if self.dyn_size_ext and x is self.dyn_size_ext.placeholder:
+        if self.dyn_size_ext is not None and x is self.dyn_size_ext.placeholder:
             return True
         tag = _DimMixin.get_tag_from_size_tensor(x)
         if tag and tag == self:
@@ -1084,7 +1085,7 @@ class _DimMixin:
             self.batch = batch  # overtake
         if not self.is_batch_dim() and self.is_dynamic():
             if same_as_before:
-                assert self.dyn_size_ext and self.dyn_size_ext.placeholder is not None
+                assert self.dyn_size_ext is not None and self.dyn_size_ext.placeholder is not None
                 # Do not overwrite it.
             else:
                 self._init_default_dyn_size_ext(x)
@@ -1109,7 +1110,7 @@ class _DimMixin:
         if self.is_static():
             return
         self._validate_in_current_graph()
-        if self.dyn_size_ext and (self.dyn_size_ext.placeholder is not None or template_only):
+        if self.dyn_size_ext is not None and (self.dyn_size_ext.placeholder is not None or template_only):
             return
         same_base = self.get_same_base()
         op = self.derived_from_op or same_base.derived_from_op
@@ -1126,7 +1127,7 @@ class _DimMixin:
         for x_dim in op.inputs:
             if self.batch:
                 x_dim = x_dim.get_for_batch_ctx(self.batch, self.control_flow_ctx)
-            if x_dim.dyn_size_ext and x_dim.dyn_size_ext.raw_tensor is not None:
+            if x_dim.dyn_size_ext is not None and x_dim.dyn_size_ext.raw_tensor is not None:
                 # noinspection PyProtectedMember
                 backend = x_dim.dyn_size_ext._raw_backend
                 break
@@ -1274,12 +1275,12 @@ class _DimMixin:
             if self.batch:
                 x_dim = x_dim.get_for_batch_ctx(self.batch, self.control_flow_ctx)
             x_dim.complete_dyn_size(template_only=template_only, _backend=backend)
-            if not x_dim.dyn_size_ext and not x_dim.dimension:
+            if x_dim.dyn_size_ext is None and not x_dim.dimension:
                 return
             y = _bin_op(y, x_dim.dimension or x_dim.dyn_size_ext)
             if not template_only and y.raw_tensor is not None:
                 y_max_value = _bin_op(y_max_value, x_dim.get_dim_value_tensor())
-        assert y, f"op {op}?"
+        assert y is not None, f"op {op}?"
         if self.dyn_size_ext is not None:
             assert self.dyn_size_ext.dim_tags == y.dim_tags
         if y.batch:
@@ -1288,8 +1289,8 @@ class _DimMixin:
         else:
             self.batch = y.batch
         self.dyn_size_ext = y
-        if not template_only and y_max_value:
-            assert y_max_value and y_max_value.raw_tensor is not None
+        if not template_only and y_max_value is not None:
+            assert y_max_value is not None and y_max_value.raw_tensor is not None
             self._dyn_size_max_value = y_max_value
         if tf and y.placeholder is not None:
             self.set_tag_on_size_tensor(y.placeholder)
@@ -1695,7 +1696,9 @@ class _DimMixin:
         assert isinstance(self._extra.src_axis, int)
         # Maybe it changed in the meanwhile, so check.
         tag = self._extra.src_data.get_dim_tag(self._extra.src_axis)
-        if tag.description == self.description and (not tag.dyn_size_ext or not tag._validate_in_current_graph()):
+        if tag.description == self.description and (
+            tag.dyn_size_ext is None or not tag._validate_in_current_graph()
+        ):
             tag.dyn_size_ext = self.get_dyn_size_ext_for_batch_ctx(
                 tag.batch, tag.control_flow_ctx, template_only=True
             )
@@ -1709,7 +1712,7 @@ class _DimMixin:
                 other_same_base.batch, other_same_base.control_flow_ctx, template_only=True
             )
             other_same_base._maybe_update()
-        if not self.dyn_size_ext or not self._validate_in_current_graph():
+        if self.dyn_size_ext is None or not self._validate_in_current_graph():
             self.dyn_size_ext = other_same_base.get_dyn_size_ext_for_batch_ctx(
                 self.batch, self.control_flow_ctx, template_only=True
             )
@@ -1720,7 +1723,7 @@ class _DimMixin:
             )
             other_same_base._maybe_update()
         if (
-            self.dyn_size_ext
+            self.dyn_size_ext is not None
             and self.dyn_size_ext.raw_tensor is None
             and other_same_base.dyn_size_ext.raw_tensor is not None
         ):
@@ -1792,7 +1795,7 @@ class _DimMixin:
             if not dim._validate_in_current_graph():
                 continue
             self_dim = self._make_extra().same_for_batch_ctx.get(key, None)
-            if self_dim and (self_dim.dyn_size_ext or not dim.dyn_size_ext):
+            if self_dim and (self_dim.dyn_size_ext is not None or dim.dyn_size_ext is None):
                 continue  # keep ours
             if dim.dyn_size_ext is None:
                 continue  # undefined, do not overtake
@@ -1982,7 +1985,7 @@ class _DimMixin:
         self.complete_dyn_size()
         if self._dyn_size_max_value is not None:
             return self._dyn_size_max_value
-        if self.dyn_size_ext and self.dyn_size_ext.placeholder is not None:
+        if self.dyn_size_ext is not None and self.dyn_size_ext.placeholder is not None:
             if self.dyn_size_ext.batch_ndim > 0:
                 res = rf.reduce_max(
                     self.dyn_size_ext,
@@ -330,7 +330,7 @@ class _TensorMixin(_TensorMixinBase):
             if tag.is_batch_dim():
                 continue
             if tag.is_dynamic():
-                assert tag.dyn_size_ext, "%s sanity_check: dynamic dim %s undefined" % (self, tag)
+                assert tag.dyn_size_ext is not None, "%s sanity_check: dynamic dim %s undefined" % (self, tag)
                 if not ignore_placeholder:
                     if tag.dyn_size_ext.placeholder is None:
                         tag.complete_dyn_size()
@@ -2519,7 +2519,7 @@ class _TensorMixin(_TensorMixinBase):
         dim_tag = self.dim_tags[axis]
         # It's possible that dim tags are not unique (https://github.com/rwth-i6/returnn/issues/632).
         matching_tags = [i for (i, tag) in enumerate(self.dim_tags) if tag == dim_tag]
-        if dim_tag.dyn_size_ext and len(matching_tags) == 1:
+        if dim_tag.dyn_size_ext is not None and len(matching_tags) == 1:
             return dim_tag
         if axis == self.time_dim_axis:
             return "T"  # this might change
@@ -2800,7 +2800,7 @@ class _TensorMixin(_TensorMixinBase):
         assert self.batch_dim_axis is not None
         batch_dim_ = self._dims[self.batch_dim_axis]
         assert isinstance(batch_dim_, Dim)
-        if batch_dim_.dyn_size_ext and batch_dim_.dyn_size_ext.raw_tensor is not None:
+        if batch_dim_.dyn_size_ext is not None and batch_dim_.dyn_size_ext.raw_tensor is not None:
             backend = batch_dim_.dyn_size_ext._raw_backend
             return backend.fill_raw([batch_dim_.dyn_size_ext.raw_tensor], dim.size)
         import tensorflow as tf
@@ -2847,7 +2847,7 @@ class _TensorMixin(_TensorMixinBase):
         assert 0 <= axis < self.batch_ndim
         assert axis != self.batch_dim_axis
         tag: Dim = self.dim_tags[axis]
-        assert tag.dyn_size_ext and tag.dyn_size_ext.raw_tensor is not None
+        assert tag.dyn_size_ext is not None and tag.dyn_size_ext.raw_tensor is not None
         backend = tag.dyn_size_ext._raw_backend
         assert set(tag.dyn_size_ext.dim_tags).issubset(self.dim_tags)  # https://github.com/rwth-i6/returnn/issues/721
         with backend.name_scope_raw("get_sequence_mask_broadcast"):
@@ -2900,7 +2900,7 @@ class _TensorMixin(_TensorMixinBase):
         assert 0 <= axis < self.batch_ndim
         assert axis != self.batch_dim_axis
         tag = self.dim_tags[axis]
-        assert tag.dyn_size_ext
+        assert tag.dyn_size_ext is not None
         return tag.dyn_size_ext.copy_compatible_to(self, check_dtype=False, check_sparse=False).placeholder
 
     def num_elements(self: Tensor) -> Union[int, Tensor]:
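For context: `dyn_size_ext` stores the per-sequence lengths of a dynamic dim, and the `get_sequence_mask_broadcast` code above turns those lengths into a boolean padding mask broadcastable against the tensor. A conceptual NumPy sketch of that mask construction (not RETURNN's dim-tag-aware implementation):

    import numpy as np

    def sequence_mask(seq_lens: np.ndarray, max_len: int) -> np.ndarray:
        """mask[b, t] is True exactly for the valid frames t < seq_lens[b]."""
        return np.arange(max_len)[None, :] < seq_lens[:, None]

    print(sequence_mask(np.array([2, 3]), max_len=4))
    # [[ True  True False False]
    #  [ True  True  True False]]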
@@ -17,6 +17,16 @@ class _TensorOpOverloadsMixin(_TensorMixinBase):
     # Note that all those ops have native implementations as well,
     # so keep the logic in sync.
 
+    def __bool__(self):
+        from returnn.log import log
+        from returnn.util.basic import BehaviorVersion
+
+        if BehaviorVersion.get() >= 22:
+            raise TypeError(f"{self} __bool__: Using a Tensor in a boolean context is not allowed.")
+        log.print_deprecation_warning(
+            f"{self} __bool__: Using a Tensor in a boolean context is deprecated.", behavior_version=22
+        )
+
     # --- comparisons
 
     def __eq__(self: Tensor, other: Union[_rf_types.RawTensorTypes, Tensor]) -> Union[Tensor, bool]:
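This added `__bool__` is what motivates the `is not None` rewrites throughout this release: from behavior version 22 on, `if tensor:` raises `TypeError`; under older versions it logs a deprecation warning first (and, assuming `print_deprecation_warning` returns normally, still fails, since the method then implicitly returns `None`, which Python rejects as a `__bool__` result). A minimal sketch of the version-gated deprecation pattern, with hypothetical helpers in place of the real `returnn.log`/`BehaviorVersion` API:

    _BEHAVIOR_VERSION = 21  # normally taken from the user config

    class GuardedTensor:
        def __bool__(self) -> bool:
            if _BEHAVIOR_VERSION >= 22:
                raise TypeError(f"{self!r} __bool__: boolean context not allowed")
            print(f"DEPRECATION: {self!r} __bool__ (raises from behavior version 22)")
            return True  # the sketch returns a value; the actual hunk above does not

    t = GuardedTensor()
    if t:  # warns under version 21; raises TypeError once the version is >= 22
        pass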
@@ -70,7 +70,7 @@ class Dim(_DimMixin):
         if dimension is None:
             self.capacity = capacity
             self.size = None
-            self.dyn_size_ext = dyn_size_ext.copy() if dyn_size_ext else None
+            self.dyn_size_ext = dyn_size_ext.copy() if dyn_size_ext is not None else None
         elif isinstance(dimension, int):
             self.capacity = capacity or dimension
             self.size = dimension
@@ -100,7 +100,7 @@ class TensorDict:
                 continue
             key_ = f"{key}:size{i}"
             assert key_ not in out
-            if dim.is_batch_dim() and (not dim.dyn_size_ext or dim.dyn_size_ext.raw_tensor is None):
+            if dim.is_batch_dim() and (dim.dyn_size_ext is None or dim.dyn_size_ext.raw_tensor is None):
                 if include_scalar_dyn_sizes:
                     dim_value = dim.get_dim_value()
                     assert isinstance(
@@ -63,7 +63,7 @@ class ReturnnLayersBackend(Backend[Layer]):
         usages: List[Tensor] = []
         visited = set()
         for use in x.raw_tensor.usages:
-            if not use.tensor or use.tensor in visited:
+            if use.tensor is None or use.tensor in visited:
                 continue
             visited.add(use.tensor)
             usages.append(use.tensor)
@@ -878,7 +878,7 @@ class ReturnnLayersBackend(Backend[Layer]):
         out = rfl.make_layer(
             {
                 "class": "eval",
-                "from": [recent] if recent else [],  # use as control dependency
+                "from": [recent] if recent is not None else [],  # use as control dependency
                 "eval": _random_replay_eval,
                 "eval_locals": {"idx": ReturnnLayersBackend._random_journal.get_graph_reader_idx()},
                 "out_type": {"dims": dims, "dtype": dtype, "sparse_dim": sparse_dim, "feature_dim": feature_dim},
@@ -67,7 +67,7 @@ def _register_dim_deps_when_novel(dim: Dim, deps: List[Tensor]):
             return  # discard new list, keep old
     if _register_dim_via_dyn_layer(dim):
         return
-    if dim.dyn_size_ext and not isinstance(dim.dyn_size_ext.raw_tensor, rfl.Layer):
+    if dim.dyn_size_ext is not None and not isinstance(dim.dyn_size_ext.raw_tensor, rfl.Layer):
         # In the TF net dict backend, the dims dyn_size_ext are usually just templates.
         # The raw_tensor would not be set.
         # Once the net dict is created, and then only when the actual TFNetwork is created,
@@ -110,7 +110,7 @@ def _register_dim_via_dyn_layer(dim: Dim) -> bool:
         return False
     if dim in _dim_deps:
         return False
-    assert dim.dyn_size_ext
+    assert dim.dyn_size_ext is not None
     if dim.dyn_size_ext.raw_tensor is None:
         return False
     assert isinstance(dim.dyn_size_ext.raw_tensor, rfl.Layer)
@@ -180,7 +180,7 @@ class Layer:
 
     def __repr__(self):
         parts = [self.get_abs_name_repr()]
-        if self.tensor:
+        if self.tensor is not None:
             parts.append("[%s]" % ",".join(self.tensor.get_batch_axes_short_description()))
         return f"<{self.__class__.__name__} {' '.join(parts)}>"
 
@@ -634,7 +634,7 @@ class Layer:
         Creates the child together with a layer ref if it does not exist yet.
         """
         child = self.get_child(name)
-        if not child.tensor:
+        if child.tensor is None:
             child.tensor = data
         assert child.tensor is data
         if data.raw_tensor is None:
@@ -718,7 +718,11 @@ class Layer:
         # However, we allow to use the name if it is the attrib itself.
         if self.module and name not in reserved_names and getattr(self.parent.module, name, None) is self.module:
             return name
-        if self.tensor and name not in reserved_names and getattr(self.parent.module, name, None) is self.tensor:
+        if (
+            self.tensor is not None
+            and name not in reserved_names
+            and getattr(self.parent.module, name, None) is self.tensor
+        ):
             return name
         # We might exclude all other attribs.
         # However, e.g. "dropout" is a common attrib storing the dropout rate (float),
@@ -761,7 +765,7 @@ class Layer:
         nest.map_structure(_maybe_add_dep, self.layer_dict)
         if self.children and "output" in self.children:
             _maybe_add_dep(self.children["output"].tensor)
-        if self.parent and self.parent.tensor:
+        if self.parent and self.parent.tensor is not None:
             _maybe_add_dep(self.parent.tensor)
         if self.layer_extra_dependencies:
             dep_list.extend(self.layer_extra_dependencies)
@@ -1101,7 +1105,7 @@ class _NetDictBuilderCtx:
                 if dim.dyn_size_ext is None:
                     dim.complete_dyn_size()
                 assert (
-                    dim.dyn_size_ext
+                    dim.dyn_size_ext is not None
                 ), f"{sub_name_ctx}: need {dim} to be defined to be able to know about implicit dims"
             dim_tags.extend(data_template.dim_tags_set_implicit_only_wrapped)
             assert len(dim_tags) == len(
@@ -539,8 +539,8 @@ class _LoopState:
 
     def _map_name_ctx_to_last_tensor(self, name_ctx: rfl.Layer) -> Tensor:
         assert isinstance(name_ctx, rfl.Layer)
-        assert name_ctx.tensor, f"{self.loop} state {name_ctx} not assigned?"
-        assert self.loop.name_ctx.tensor, f"{self.loop} not yet exited?"
+        assert name_ctx.tensor is not None, f"{self.loop} state {name_ctx} not assigned?"
+        assert self.loop.name_ctx.tensor is not None, f"{self.loop} not yet exited?"
         return self.loop.last(name_ctx.tensor)
 
     def get_last(self):
@@ -313,7 +313,7 @@ def register_extern_data(data: Tensor[rfl.Layer]):
             dtype=data.size_dtype,
             batch=data.batch,
         )
-        if tag.is_batch_dim() and not tag.dyn_size_ext and tag.dimension is None:
+        if tag.is_batch_dim() and tag.dyn_size_ext is None and tag.dimension is None:
            # Undefined batch dim tag. Set default data template.
            batch_dim.dyn_size_ext = orig_tag.dyn_size_ext = tag.dyn_size_ext = Tensor(
                name=f"batch_dim_default_dyn_size_ext",
@@ -22,7 +22,7 @@ class PrevTensorRef(Tensor):
         """
         parent_name_ctx = cur_layer_name_ctx.parent
         prev_tensor_name_ctx = parent_name_ctx.get_child(f"prev:{cur_layer_name_ctx.name}")
-        if prev_tensor_name_ctx.tensor:
+        if prev_tensor_name_ctx.tensor is not None:
             prev_tensor_ref = prev_tensor_name_ctx.tensor
             assert isinstance(prev_tensor_ref, PrevTensorRef)
             assert prev_tensor_ref.cur_layer_name_ctx is cur_layer_name_ctx