returnn 1.20240205.153348__tar.gz → 1.20240206.450__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of returnn might be problematic.

Files changed (444)
  1. {returnn-1.20240205.153348/returnn.egg-info → returnn-1.20240206.450}/PKG-INFO +1 -1
  2. returnn-1.20240206.450/_setup_info_generated.py +2 -0
  3. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/generating.py +13 -28
  4. returnn-1.20240206.450/returnn/tensor/utils.py +237 -0
  5. {returnn-1.20240205.153348 → returnn-1.20240206.450/returnn.egg-info}/PKG-INFO +1 -1
  6. returnn-1.20240205.153348/_setup_info_generated.py +0 -2
  7. returnn-1.20240205.153348/returnn/tensor/utils.py +0 -118
  8. {returnn-1.20240205.153348 → returnn-1.20240206.450}/.editorconfig +0 -0
  9. {returnn-1.20240205.153348 → returnn-1.20240206.450}/.gitignore +0 -0
  10. {returnn-1.20240205.153348 → returnn-1.20240206.450}/.gitmodules +0 -0
  11. {returnn-1.20240205.153348 → returnn-1.20240206.450}/.kateconfig +0 -0
  12. {returnn-1.20240205.153348 → returnn-1.20240206.450}/CHANGELOG.md +0 -0
  13. {returnn-1.20240205.153348 → returnn-1.20240206.450}/CODEOWNERS +0 -0
  14. {returnn-1.20240205.153348 → returnn-1.20240206.450}/CONTRIBUTING.md +0 -0
  15. {returnn-1.20240205.153348 → returnn-1.20240206.450}/LICENSE +0 -0
  16. {returnn-1.20240205.153348 → returnn-1.20240206.450}/MANIFEST.in +0 -0
  17. {returnn-1.20240205.153348 → returnn-1.20240206.450}/README.rst +0 -0
  18. {returnn-1.20240205.153348 → returnn-1.20240206.450}/__init__.py +0 -0
  19. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/12AX.cluster_map +0 -0
  20. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/_setup_returnn_env.py +0 -0
  21. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-fwd.config +0 -0
  22. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-horovod-mpi.py +0 -0
  23. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-horovod-mpi.py.sh +0 -0
  24. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-horovod-mpi.sh +0 -0
  25. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-hyper-param-tuning.config +0 -0
  26. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-iter-dataset.py +0 -0
  27. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-list-devices.py +0 -0
  28. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-lua-torch-layer.config +0 -0
  29. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-pretrain.config +0 -0
  30. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-record-and-push-to-webserver.py +0 -0
  31. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-returnn-as-framework.py +0 -0
  32. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-rf-pt-benchmark.py +0 -0
  33. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-rf.config +0 -0
  34. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-rhn-enwik8.config +0 -0
  35. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-sprint-interface.py +0 -0
  36. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-att-copy.config +0 -0
  37. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-attention.config +0 -0
  38. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-chunking-blstm.12ax.config +0 -0
  39. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-contribrnn-lstm.12ax.config +0 -0
  40. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-enc-dec.config +0 -0
  41. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-hard-att-copy.config +0 -0
  42. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-lstm-benchmark.py +0 -0
  43. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-maxgradnorm-lstm.12ax.config +0 -0
  44. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-native-lstm-lowmem.12ax.config +0 -0
  45. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-native-lstm.12ax.config +0 -0
  46. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-native-lstm2.12ax.config +0 -0
  47. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-native-lstm2.12ax.tuned.config +0 -0
  48. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-neural-transducer.12ax.config +0 -0
  49. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-rec-explicit-lstm.config +0 -0
  50. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-rec-explicit-rnn.config +0 -0
  51. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-rec-self-att.config +0 -0
  52. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-search-compiled-graph.py +0 -0
  53. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-tf-vanilla-lstm.12ax.config +0 -0
  54. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-timit-lstm-ctc.config +0 -0
  55. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-torch.config +0 -0
  56. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo-upd-mult-model.lstm.12ax.config +0 -0
  57. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/demo.sh +0 -0
  58. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png +0 -0
  59. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png +0 -0
  60. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png +0 -0
  61. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/README.txt +0 -0
  62. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/chars.txt +0 -0
  63. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/config_demo +0 -0
  64. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/config_fwd +0 -0
  65. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/config_real +0 -0
  66. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/create_IAM_dataset.py +0 -0
  67. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/decode.py +0 -0
  68. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/features/raw/demo.h5 +0 -0
  69. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/go.sh +0 -0
  70. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/lines.txt +0 -0
  71. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/split/eval.txt +0 -0
  72. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/split/train.txt +0 -0
  73. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/IAM/split/valid.txt +0 -0
  74. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/README.md +0 -0
  75. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/artificial/create_test_h5.py +0 -0
  76. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/artificial/forwardconfig +0 -0
  77. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/artificial/go.sh +0 -0
  78. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/artificial/trainconfig +0 -0
  79. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/artificial_rgb/create_test_h5.py +0 -0
  80. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/artificial_rgb/forwardconfig +0 -0
  81. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/artificial_rgb/go.sh +0 -0
  82. {returnn-1.20240205.153348 → returnn-1.20240206.450}/demos/mdlstm/artificial_rgb/trainconfig +0 -0
  83. {returnn-1.20240205.153348 → returnn-1.20240206.450}/pyproject.toml +0 -0
  84. {returnn-1.20240205.153348 → returnn-1.20240206.450}/requirements.txt +0 -0
  85. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/__init__.py +0 -0
  86. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/__main__.py +0 -0
  87. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/__old_mod_loader__.py +0 -0
  88. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/__setup__.py +0 -0
  89. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/config.py +0 -0
  90. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/__init__.py +0 -0
  91. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/audio.py +0 -0
  92. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/basic.py +0 -0
  93. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/bundle_file.py +0 -0
  94. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/cached.py +0 -0
  95. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/cached2.py +0 -0
  96. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/hdf.py +0 -0
  97. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/lm.py +0 -0
  98. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/map.py +0 -0
  99. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/meta.py +0 -0
  100. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/multi_proc.py +0 -0
  101. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/normalization_data.py +0 -0
  102. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/numpy_dump.py +0 -0
  103. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/raw_wav.py +0 -0
  104. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/sprint.py +0 -0
  105. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/stereo.py +0 -0
  106. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/util/__init__.py +0 -0
  107. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/util/feature_extraction.py +0 -0
  108. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/util/strings.py +0 -0
  109. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/util/vocabulary.py +0 -0
  110. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/engine/__init__.py +0 -0
  111. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/engine/base.py +0 -0
  112. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/engine/batch.py +0 -0
  113. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/__init__.py +0 -0
  114. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/__main__.py +0 -0
  115. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/.git +0 -0
  116. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/.gitignore +0 -0
  117. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/LICENSE +0 -0
  118. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/README.md +0 -0
  119. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/aligner.gif +0 -0
  120. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/check.png +0 -0
  121. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/core.cu +0 -0
  122. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/core.h +0 -0
  123. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/core_cpu.cpp +0 -0
  124. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/LICENSE +0 -0
  125. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/MANIFEST.in +0 -0
  126. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/README.md +0 -0
  127. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/binding.cpp +0 -0
  128. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.cu +0 -0
  129. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.h +0 -0
  130. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/requirements.txt +0 -0
  131. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/setup.py +0 -0
  132. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/__init__.py +0 -0
  133. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/test.py +0 -0
  134. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/ref_rna.py +0 -0
  135. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/setup.py +0 -0
  136. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op.cc +0 -0
  137. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op_kernel_tmpl.h +0 -0
  138. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/warp_rna/__init__.py +0 -0
  139. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/WarpRna/warp-rna/test.cpp +0 -0
  140. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/__init__.py +0 -0
  141. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/graph_editor/README.md +0 -0
  142. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/graph_editor/__init__.py +0 -0
  143. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/graph_editor/edit.py +0 -0
  144. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/graph_editor/reroute.py +0 -0
  145. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/graph_editor/select.py +0 -0
  146. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/graph_editor/subgraph.py +0 -0
  147. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/graph_editor/transform.py +0 -0
  148. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/extern/graph_editor/util.py +0 -0
  149. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/forward_iface.py +0 -0
  150. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/__init__.py +0 -0
  151. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_backend.py +0 -0
  152. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_native/__init__.py +0 -0
  153. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_native/backend.cpp +0 -0
  154. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_native/backend.hpp +0 -0
  155. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_native/module.cpp +0 -0
  156. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_native/module.hpp +0 -0
  157. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_native/py_utils.hpp +0 -0
  158. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_native/tensor_ops.cpp +0 -0
  159. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_native/tensor_ops.hpp +0 -0
  160. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_numpy_backend.py +0 -0
  161. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_random_journal.py +0 -0
  162. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/_utils.py +0 -0
  163. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/array_.py +0 -0
  164. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/attention.py +0 -0
  165. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/audio/__init__.py +0 -0
  166. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/audio/mel.py +0 -0
  167. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/audio/specaugment.py +0 -0
  168. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/backend.py +0 -0
  169. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/cond.py +0 -0
  170. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/const.py +0 -0
  171. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/container.py +0 -0
  172. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/control_flow_ctx.py +0 -0
  173. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/conv.py +0 -0
  174. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/decoder/__init__.py +0 -0
  175. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/decoder/transformer.py +0 -0
  176. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/device.py +0 -0
  177. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/dims.py +0 -0
  178. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/dropout.py +0 -0
  179. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/dtype.py +0 -0
  180. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/encoder/__init__.py +0 -0
  181. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/encoder/base.py +0 -0
  182. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/encoder/conformer.py +0 -0
  183. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/gradient.py +0 -0
  184. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/graph.py +0 -0
  185. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/hooks.py +0 -0
  186. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/init.py +0 -0
  187. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/label_smoothing.py +0 -0
  188. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/linear.py +0 -0
  189. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/loop.py +0 -0
  190. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/loss.py +0 -0
  191. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/math_.py +0 -0
  192. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/matmul.py +0 -0
  193. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/module.py +0 -0
  194. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/normalization.py +0 -0
  195. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/parameter.py +0 -0
  196. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/rand.py +0 -0
  197. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/rec.py +0 -0
  198. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/reduce.py +0 -0
  199. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/run_ctx.py +0 -0
  200. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/signal.py +0 -0
  201. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/state.py +0 -0
  202. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/tensor_array.py +0 -0
  203. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/frontend/types.py +0 -0
  204. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/import_/__init__.py +0 -0
  205. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/import_/common.py +0 -0
  206. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/import_/git.py +0 -0
  207. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/import_/import_.py +0 -0
  208. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/learning_rate_control.py +0 -0
  209. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/log.py +0 -0
  210. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/native_op.cpp +0 -0
  211. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/native_op.py +0 -0
  212. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/pretrain.py +0 -0
  213. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/sprint/__init__.py +0 -0
  214. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/sprint/cache.py +0 -0
  215. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/sprint/control.py +0 -0
  216. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/sprint/error_signals.py +0 -0
  217. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/sprint/extern_interface.py +0 -0
  218. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/sprint/interface.py +0 -0
  219. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/README.md +0 -0
  220. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/__init__.py +0 -0
  221. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/_dim_extra.py +0 -0
  222. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/_tensor_extra.py +0 -0
  223. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/_tensor_mixin_base.py +0 -0
  224. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/_tensor_op_overloads.py +0 -0
  225. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/control_flow_ctx.py +0 -0
  226. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/dim.py +0 -0
  227. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/marked_dim.py +0 -0
  228. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/tensor.py +0 -0
  229. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tensor/tensor_dict.py +0 -0
  230. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/__init__.py +0 -0
  231. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/compat.py +0 -0
  232. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/data_pipeline.py +0 -0
  233. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/distributed.py +0 -0
  234. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/engine.py +0 -0
  235. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/README.md +0 -0
  236. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/__init__.py +0 -0
  237. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/_backend.py +0 -0
  238. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/_utils.py +0 -0
  239. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/cond.py +0 -0
  240. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/config_entry_points.py +0 -0
  241. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/debug_eager_mode.py +0 -0
  242. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/dims.py +0 -0
  243. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/layer.py +0 -0
  244. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/loop.py +0 -0
  245. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/make_layer.py +0 -0
  246. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/masked_computation.py +0 -0
  247. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/parameter_assign.py +0 -0
  248. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_layers/prev_tensor_ref.py +0 -0
  249. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_low_level/__init__.py +0 -0
  250. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/frontend_low_level/_backend.py +0 -0
  251. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/horovod.py +0 -0
  252. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/hyper_param_tuning.py +0 -0
  253. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/layers/__init__.py +0 -0
  254. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/layers/base.py +0 -0
  255. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/layers/basic.py +0 -0
  256. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/layers/rec.py +0 -0
  257. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/layers/segmental_model.py +0 -0
  258. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/layers/signal_processing.py +0 -0
  259. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/layers/variable.py +0 -0
  260. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/native_op.py +0 -0
  261. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/network.py +0 -0
  262. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/sprint.py +0 -0
  263. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/updater.py +0 -0
  264. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/util/__init__.py +0 -0
  265. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/util/basic.py +0 -0
  266. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/util/data.py +0 -0
  267. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/util/gradient_checkpoint.py +0 -0
  268. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/util/ken_lm.py +0 -0
  269. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/tf/util/open_fst.py +0 -0
  270. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/README.md +0 -0
  271. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/__init__.py +0 -0
  272. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/data/__init__.py +0 -0
  273. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/data/extern_data.py +0 -0
  274. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/data/pipeline.py +0 -0
  275. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/data/queued_data_iter.py +0 -0
  276. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/data/returnn_dataset_wrapper.py +0 -0
  277. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/data/tensor_utils.py +0 -0
  278. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/distributed.py +0 -0
  279. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/engine.py +0 -0
  280. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/frontend/__init__.py +0 -0
  281. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/frontend/_backend.py +0 -0
  282. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/frontend/_rand.py +0 -0
  283. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/frontend/bridge.py +0 -0
  284. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/frontend/raw_ops.py +0 -0
  285. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/updater.py +0 -0
  286. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/util/README.md +0 -0
  287. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/util/__init__.py +0 -0
  288. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/util/diagnose_gpu.py +0 -0
  289. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/torch/util/scaled_gradient.py +0 -0
  290. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/__init__.py +0 -0
  291. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/basic.py +0 -0
  292. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/better_exchook.py +0 -0
  293. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/bpe.py +0 -0
  294. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/debug.py +0 -0
  295. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/debug_helpers.py +0 -0
  296. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/fsa.py +0 -0
  297. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/literal_py_to_pickle.py +0 -0
  298. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/math.py +0 -0
  299. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/multi_proc_non_daemonic_spawn.py +0 -0
  300. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/native_code_compiler.py +0 -0
  301. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/pprint.py +0 -0
  302. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/py-to-pickle.cpp +0 -0
  303. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/py_compat.py +0 -0
  304. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/py_ext_mod_compiler.py +0 -0
  305. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/result_with_reason.py +0 -0
  306. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/sig_proc.py +0 -0
  307. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/task_system.py +0 -0
  308. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/train_proc_manager.py +0 -0
  309. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/util/watch_memory.py +0 -0
  310. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn.egg-info/SOURCES.txt +0 -0
  311. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn.egg-info/dependency_links.txt +0 -0
  312. {returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn.egg-info/top_level.txt +0 -0
  313. {returnn-1.20240205.153348 → returnn-1.20240206.450}/rnn.py +0 -0
  314. {returnn-1.20240205.153348 → returnn-1.20240206.450}/setup.cfg +0 -0
  315. {returnn-1.20240205.153348 → returnn-1.20240206.450}/setup.py +0 -0
  316. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/DummySprintExec.py +0 -0
  317. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm-inspection-profile.xml +0 -0
  318. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/.gitignore +0 -0
  319. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/.name +0 -0
  320. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/codeStyleSettings.xml +0 -0
  321. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/codeStyles/Project.xml +0 -0
  322. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/codeStyles/codeStyleConfig.xml +0 -0
  323. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/inspectionProfiles/Project_Default.xml +0 -0
  324. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/inspectionProfiles/profiles_settings.xml +0 -0
  325. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/misc.xml +0 -0
  326. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/modules.xml +0 -0
  327. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/returnn.iml +0 -0
  328. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/PyCharm.idea/scopes/scope_settings.xml +0 -0
  329. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/_set_num_threads1.py +0 -0
  330. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/_setup_returnn_env.py +0 -0
  331. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/_setup_test_env.py +0 -0
  332. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/bpe-unicode-demo.codes +0 -0
  333. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/bpe-unicode-demo.vocab +0 -0
  334. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/lexicon_opt.fst +0 -0
  335. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/lexicon_opt.isyms +0 -0
  336. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/lexicon_opt.jpg +0 -0
  337. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/lexicon_opt.osyms +0 -0
  338. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/lint_common.py +0 -0
  339. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/pycharm-inspect.py +0 -0
  340. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/pylint.py +0 -0
  341. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/returnn-as-framework.py +0 -0
  342. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/rf_utils.py +0 -0
  343. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/spelling.dic +0 -0
  344. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_Config.py +0 -0
  345. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_Dataset.py +0 -0
  346. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_Fsa.py +0 -0
  347. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_GeneratingDataset.py +0 -0
  348. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_HDFDataset.py +0 -0
  349. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_LearningRateControl.py +0 -0
  350. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_Log.py +0 -0
  351. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_MultiProcDataset.py +0 -0
  352. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_Pretrain.py +0 -0
  353. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_ResNet.py +0 -0
  354. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_SprintDataset.py +0 -0
  355. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_SprintInterface.py +0 -0
  356. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TFEngine.py +0 -0
  357. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TFNativeOp.py +0 -0
  358. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TFNetworkLayer.py +0 -0
  359. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TFNetworkRecLayer.py +0 -0
  360. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TFNetworkSigProcLayer.py +0 -0
  361. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TFUpdater.py +0 -0
  362. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TFUtil.py +0 -0
  363. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TF_determinism.py +0 -0
  364. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TaskSystem.py +0 -0
  365. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TaskSystem_SharedMem.py +0 -0
  366. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_TranslationDataset.py +0 -0
  367. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_Util.py +0 -0
  368. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_demos.py +0 -0
  369. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_fork_exec.py +0 -0
  370. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_hdf_dump.py +0 -0
  371. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_array.py +0 -0
  372. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_attention.py +0 -0
  373. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_base.py +0 -0
  374. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_cond.py +0 -0
  375. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_const.py +0 -0
  376. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_container.py +0 -0
  377. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_conv.py +0 -0
  378. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_encoder_conformer.py +0 -0
  379. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_gradient.py +0 -0
  380. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_label_smoothing.py +0 -0
  381. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_loop.py +0 -0
  382. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_math.py +0 -0
  383. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_normalization.py +0 -0
  384. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_rec.py +0 -0
  385. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_reduce.py +0 -0
  386. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_rf_signal.py +0 -0
  387. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_tensor.py +0 -0
  388. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_tools.py +0 -0
  389. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_torch_dataset.py +0 -0
  390. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_torch_engine.py +0 -0
  391. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_torch_frontend.py +0 -0
  392. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tests/test_torch_internal_frontend.py +0 -0
  393. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/_setup_returnn_env.py +0 -0
  394. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/analyze-dataset-batches.py +0 -0
  395. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/bliss-collect-seq-lens.py +0 -0
  396. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/bliss-dump-text.py +0 -0
  397. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/bliss-get-segment-names.py +0 -0
  398. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/bliss-to-ogg-zip.py +0 -0
  399. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/bpe-create-lexicon.py +0 -0
  400. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/calculate-word-error-rate.py +0 -0
  401. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/cleanup-old-models.py +0 -0
  402. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/collect-orth-symbols.py +0 -0
  403. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/collect-words.py +0 -0
  404. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/compile_native_op.py +0 -0
  405. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/compile_tf_graph.py +0 -0
  406. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/debug-dump-search-scores.py +0 -0
  407. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/debug-plot-search-scores.py +0 -0
  408. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/dump-dataset-raw-strings.py +0 -0
  409. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/dump-dataset.py +0 -0
  410. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/dump-forward-stats.py +0 -0
  411. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/dump-forward.py +0 -0
  412. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/dump-network-json.py +0 -0
  413. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/dump-pickle.py +0 -0
  414. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/extract_state_tying_from_dataset.py +0 -0
  415. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/get-attention-weights.py +0 -0
  416. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/get-best-model-epoch.py +0 -0
  417. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/hdf_dump.py +0 -0
  418. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/hdf_dump_translation_dataset.py +0 -0
  419. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/import-blocks-mt-model.py +0 -0
  420. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/import-t2t-mt-model.py +0 -0
  421. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/.gitignore +0 -0
  422. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/Makefile +0 -0
  423. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/README.md +0 -0
  424. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/example/README.md +0 -0
  425. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/example/libs_list +0 -0
  426. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.config +0 -0
  427. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.keep_over_epoch.lstm2.config +0 -0
  428. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/example/rescore_lattice.sh +0 -0
  429. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/example/state_vars_list +0 -0
  430. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/example/tensor_names_list +0 -0
  431. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/file.h +0 -0
  432. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/htklatticerescorer.cc +0 -0
  433. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/htklatticerescorer.h +0 -0
  434. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/main.cc +0 -0
  435. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/rescorer.h +0 -0
  436. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/vocabulary.cc +0 -0
  437. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/lattice_rescorer/vocabulary.h +0 -0
  438. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/tf_avg_checkpoints.py +0 -0
  439. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/tf_inspect_checkpoint.py +0 -0
  440. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/tf_inspect_summary_log.py +0 -0
  441. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/torch_avg_checkpoints.py +0 -0
  442. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/torch_export_to_onnx.py +0 -0
  443. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/torch_inspect_checkpoint.py +0 -0
  444. {returnn-1.20240205.153348 → returnn-1.20240206.450}/tools/torch_inspect_checkpoint_and_opt.py +0 -0

{returnn-1.20240205.153348/returnn.egg-info → returnn-1.20240206.450}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: returnn
-Version: 1.20240205.153348
+Version: 1.20240206.450
 Summary: The RWTH extensible training framework for universal recurrent neural networks
 Home-page: https://github.com/rwth-i6/returnn/
 Author: Albert Zeyer

returnn-1.20240206.450/_setup_info_generated.py (added)
@@ -0,0 +1,2 @@
+version = '1.20240206.000450'
+long_version = '1.20240206.000450+git.b69cbd6'

{returnn-1.20240205.153348 → returnn-1.20240206.450}/returnn/datasets/generating.py
@@ -11,7 +11,7 @@ import typing
 
 from returnn.util.basic import class_idx_seq_to_1_of_k, CollectionReadCheckCovered
 from returnn.log import log
-from returnn.tensor import Tensor, TensorDict
+from returnn.tensor import Tensor, Dim, TensorDict
 
 from .util.feature_extraction import ExtractAudioFeatures
 from .util.vocabulary import *
@@ -967,14 +967,16 @@ class DummyGenericDataset(GeneratingDataset):
         data_template: Union[TensorDict, Dict[str, Union[Tensor, Dict[str, Any]]]],
         num_seqs: int,
         *,
-        seq_lens: Optional[Union[int, Tuple[int, int], Dict[str, Union[int, Tuple[int, int]]]]] = None,
+        seq_lens: Union[None, int, Tuple[int, int], Dict[Union[str, Dim, None], Union[int, Tuple[int, int]]]] = None,
         **kwargs,
     ):
         """
         :param data_template: describes each tensor
         :param num_seqs:
-        :param seq_lens: either fixed seq len, or take randint. per data key, or same for all data keys
+        :param seq_lens: either fixed seq len, or take randint. per data key, or per dim, or same for all
         """
+        from returnn.tensor.utils import tensor_dict_dims_random_seq_len_min_max
+
         data_template_ = TensorDict()
         data_template_.update(data_template, auto_convert=True)
         data_template = data_template_
@@ -982,19 +984,8 @@
         old_style_dims = {k: (v.dim, v.ndim) for k, v in data_template.data.items()}
         super().__init__(input_dim=None, output_dim=old_style_dims, num_seqs=num_seqs, **kwargs)
         self.data_template = data_template
-        if seq_lens is None:
-            seq_lens = {}
-        elif not isinstance(seq_lens, dict):
-            seq_lens = {k: seq_lens for k in data_template.data.keys()}
-        seq_lens = dict(seq_lens)
-        for k, v in data_template.data.items():
-            if k not in seq_lens:
-                if v.shape in {(None,), (None, 1)} and v.dtype.startswith("float"):
-                    # Assume raw audio data samples, take longer seq lens by default, assume 16khz.
-                    seq_lens[k] = (1_000, 8_000)
-                else:
-                    seq_lens[k] = (5, 15)
-        self.seq_lens: Dict[str, Union[int, Tuple[int, int]]] = seq_lens
+        self.seq_lens = seq_lens
+        self._dyn_dims, self._dyn_lens_min_max = tensor_dict_dims_random_seq_len_min_max(data_template, seq_lens)
 
     def get_data_keys(self) -> List[str]:
         """data keys"""
@@ -1028,19 +1019,13 @@
 
     def _generate_data(self, key: str) -> numpy.ndarray:
         """generate for specific data key. assumes that self.random is in a correct state"""
+        from returnn.tensor.utils import get_random_seq_lens_for_dyn_dims
+
+        seq_lens = get_random_seq_lens_for_dyn_dims(self._dyn_dims, self._dyn_lens_min_max, rnd=self.random)
         templ: Tensor = self.data_template.data[key]
-        shape = list(templ.shape)
-        for axis, dim in enumerate(shape):
-            if dim is None:
-                seq_len_gen = self.seq_lens.get(key, (5, 15))
-                if isinstance(seq_len_gen, int):
-                    seq_len = seq_len_gen
-                elif isinstance(seq_len_gen, tuple):
-                    assert len(seq_len_gen) == 2  # min and max
-                    seq_len = self.random.randint(*seq_len_gen)
-                else:
-                    raise TypeError(f"{self} generate: data key {key!r} seq_len {seq_len_gen!r} invalid")
-                shape[axis] = seq_len
+        shape = [
+            seq_lens[dim][0] if dim.is_dynamic() else dim.dimension for dim in templ.dims if not dim.is_batch_dim()
+        ]
         if templ.sparse_dim:
             return self.random.randint(0, templ.sparse_dim.dimension, shape, dtype=templ.dtype)
         if templ.dtype.startswith("float"):

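For orientation, a minimal usage sketch of the reworked argument (not part of the diff; the dim tag, data key, and sizes below are purely illustrative): after this change, seq_lens can be keyed by a Dim tag (or None as a catch-all), not only by a data-key string.

    # Illustrative sketch only, not from the diff: a DummyGenericDataset with a
    # seq-len range keyed by a Dim tag instead of a data-key string.
    from returnn.tensor import Tensor, Dim, batch_dim
    from returnn.datasets.generating import DummyGenericDataset

    time_dim = Dim(None, name="time")   # dynamic (variable-length) dim
    feat_dim = Dim(40, name="feature")  # static feature dim
    data_template = {"data": Tensor("data", dims=[batch_dim, time_dim, feat_dim], dtype="float32")}
    # seq lens for the time dim are drawn uniformly from [5, 15] per generated sequence
    dataset = DummyGenericDataset(data_template, num_seqs=10, seq_lens={time_dim: (5, 15)})
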
returnn-1.20240206.450/returnn/tensor/utils.py (added)
@@ -0,0 +1,237 @@
+"""
+Some helper utils.
+"""
+
+from __future__ import annotations
+from typing import Optional, Union, Sequence, Dict, List, Tuple
+import numpy
+from returnn.tensor import Tensor, Dim, TensorDict, batch_dim
+
+
+def tensor_dict_fill_random_numpy_(
+    tensor_dict: TensorDict,
+    *,
+    rnd: Union[int, numpy.random.RandomState] = 42,
+    dyn_dim_max_sizes: Optional[Dict[Dim, int]] = None,
+    dyn_dim_min_sizes: Optional[Dict[Dim, int]] = None,
+):
+    """
+    Random fill with NumPy arrays.
+
+    :param tensor_dict:
+    :param rnd:
+    :param dyn_dim_max_sizes: you can specify max sizes for dim tags with dynamic sizes.
+        The fill random code makes sure that there is at least one entry where we reach the max size,
+        so that the dim value will be the max size.
+    :param dyn_dim_min_sizes:
+    """
+    if not isinstance(rnd, numpy.random.RandomState):
+        rnd = numpy.random.RandomState(rnd)
+    for v in tensor_dict.data.values():
+        tensor_fill_random_numpy_(v, rnd=rnd, dyn_dim_max_sizes=dyn_dim_max_sizes, dyn_dim_min_sizes=dyn_dim_min_sizes)
+
+
+def tensor_fill_random_numpy_(
+    x: Tensor,
+    *,
+    min_val: int = 0,
+    max_val: Optional[int] = None,
+    rnd: numpy.random.RandomState,
+    dyn_dim_max_sizes: Optional[Dict[Dim, int]] = None,
+    dyn_dim_min_sizes: Optional[Dict[Dim, int]] = None,
+) -> bool:
+    """fill. return whether sth was filled"""
+    if dyn_dim_max_sizes is None:
+        dyn_dim_max_sizes = {}
+    if dyn_dim_min_sizes is None:
+        dyn_dim_min_sizes = {}
+    filled = False
+    while True:
+        have_unfilled = False
+        filled_this_round = False
+
+        for dim in x.dims:
+            if dim.is_batch_dim() and not dim.dyn_size_ext:
+                dim.dyn_size_ext = Tensor("batch", [], dtype="int32")
+            if dim.is_dynamic() and not dim.dyn_size_ext:
+                dim.dyn_size_ext = Tensor(dim.name or "time", dims=[batch_dim], dtype="int32")
+            if not dim.dyn_size_ext:
+                continue
+            if tensor_fill_random_numpy_(
+                dim.dyn_size_ext,
+                min_val=dyn_dim_min_sizes.get(dim, 2),
+                max_val=dyn_dim_max_sizes.get(dim, None),
+                rnd=rnd,
+                dyn_dim_max_sizes=dyn_dim_max_sizes,
+            ):
+                if dim in dyn_dim_max_sizes:
+                    # Make sure at least one of the dyn sizes matches the max size.
+                    i = rnd.randint(0, dim.dyn_size_ext.raw_tensor.size)
+                    dim.dyn_size_ext.raw_tensor.flat[i] = dyn_dim_max_sizes[dim]
+                    if dim in dyn_dim_min_sizes:
+                        j = rnd.randint(0, dim.dyn_size_ext.raw_tensor.size - 1)
+                        if j >= i:
+                            j += 1
+                        dim.dyn_size_ext.raw_tensor.flat[j] = dyn_dim_min_sizes[dim]
+                elif dim in dyn_dim_min_sizes:
+                    raise Exception(f"also define {dim} in dyn_dim_max_sizes, not just dyn_dim_min_sizes")
+                filled = True
+                filled_this_round = True
+            if dim.dyn_size_ext.raw_tensor is None:
+                have_unfilled = True
+            elif not isinstance(dim.dyn_size_ext.raw_tensor, numpy.ndarray):
+                have_unfilled = True
+
+        if have_unfilled:
+            assert filled_this_round, f"should have filled something, {x}"
+
+        if not have_unfilled:
+            break
+
+    if x.raw_tensor is not None:
+        if not isinstance(x.raw_tensor, numpy.ndarray):
+            x.raw_tensor = None
+
+    if x.raw_tensor is None:
+        shape = [d.get_dim_value() for d in x.dims]
+        if x.dtype.startswith("int"):
+            if max_val is None:
+                max_val = rnd.randint(5, 20)
+            if x.sparse_dim and x.sparse_dim.dimension is not None:
+                max_val = x.sparse_dim.dimension
+            x.raw_tensor = rnd.randint(min_val, max_val, size=shape, dtype=x.dtype)
+        elif x.dtype == "bool":
+            x.raw_tensor = rnd.randint(0, 2, size=shape, dtype=x.dtype)
+        elif x.dtype.startswith("float"):
+            x.raw_tensor = rnd.normal(0.0, 1.0, size=shape).astype(x.dtype)
+        elif x.dtype.startswith("complex"):
+            real = rnd.normal(0.0, 1.0, size=shape)
+            imag = rnd.normal(0.0, 1.0, size=shape)
+            x.raw_tensor = (real + 1j * imag).astype(x.dtype)
+        else:
+            raise NotImplementedError(f"not implemented for {x} dtype {x.dtype}")
+        filled = True
+
+    assert isinstance(x.raw_tensor, numpy.ndarray)
+
+    return filled
+
+
+def tensor_dict_dims_random_seq_len_min_max(
+    tensor_dict: TensorDict,
+    seq_lens: Union[None, int, Tuple[int, int], Dict[Union[str, Dim, None], Union[int, Tuple[int, int]]]] = None,
+) -> Tuple[List[Dim], Dict[Dim, Tuple[int, int]]]:
+    """
+    This is specifically intended to prepare the list of all dynamic dims from the tensor dict
+    and the seq_len_min_max for :func:`get_random_seq_lens_for_dyn_dims`.
+
+    :param tensor_dict:
+    :param seq_lens: either fixed seq len, or take randint. per data key, or per dim, or same for all
+    :return: dims, seq_len_min_max
+    """
+    if seq_lens is None:
+        seq_lens = {}
+    if not isinstance(seq_lens, dict):
+        seq_lens = {None: seq_lens}
+    seq_lens: Dict[Union[str, Dim, None], Union[int, Tuple[int, int]]]
+
+    # Collect all dyn dim tags, including derived_from_op ones.
+    # The order will be sorted such that derived_from_op roots come first.
+    visited_dims = set()
+    dims = []
+    seq_len_min_max = {}  # Also collect seq_len_min_max.
+    for k, v in tensor_dict.data.items():
+        for dim in v.dims:
+            if dim.is_dynamic() and dim not in visited_dims and not dim.is_batch_dim():
+                queue = [dim]
+                offset = len(dims)
+                while queue:
+                    dim = queue.pop(0)
+                    if not dim.is_dynamic():
+                        continue
+                    if dim in visited_dims:
+                        continue
+                    visited_dims.add(dim)
+                    dims.insert(offset, dim)
+                    dim.reset_batch_and_raw()
+                    if dim.derived_from_op:
+                        queue.extend(dim.derived_from_op.inputs)
+                    else:
+                        # Need to specify seq_len_min_max.
+                        if dim in seq_lens or k in seq_lens or None in seq_lens:
+                            if dim in seq_lens:
+                                size = seq_lens[dim]
+                            elif k in seq_lens:
+                                size = seq_lens[k]
+                            else:
+                                size = seq_lens[None]
+                            if isinstance(size, int):
+                                size = (size, size)
+                            else:
+                                assert (
+                                    isinstance(size, tuple)
+                                    and len(size) == 2
+                                    and all(isinstance(s, int) for s in size)
+                                    and 0 <= size[0] <= size[1]
+                                ), f"invalid size {size!r} in seq lens {seq_lens}"
+                        else:
+                            if v.shape in {(None,), (None, 1)} and v.dtype.startswith("float"):
+                                # Assume raw audio data samples, take longer seq lens by default, assume 16khz.
+                                size = (1_000, 8_000)
+                            else:
+                                size = (5, 15)
+                        seq_len_min_max[dim] = size
+
+    return dims, seq_len_min_max
+
+
+def get_random_seq_lens_for_dyn_dims(
+    dims: Sequence[Dim],
+    seq_len_min_max: Dict[Dim, Tuple[int, int]],
+    *,
+    batch_size: int = 1,
+    rnd: Union[int, numpy.random.RandomState] = 1337,
+) -> Dict[Dim, numpy.ndarray]:
+    """
+    Make random seq lens for dims.
+
+    Note that dim tags are not actually modified here,
+    as we need to have this in a safe way,
+    which might run in parallel to the main thread.
+
+    :param dims: Note that the order matter, as we use complete_dyn_size() (or equivalent).
+    :param seq_len_min_max:
+    :param batch_size:
+    :param rnd:
+    """
+    if not isinstance(rnd, numpy.random.RandomState):
+        rnd = numpy.random.RandomState(rnd)
+
+    gen_dims = {}
+    for dim in dims:
+        if dim not in gen_dims:
+            if dim.derived_from_op:
+                # If we get a KeyError for the following, the order of dims is invalid.
+                values = [gen_dims[dim_] for dim_ in dim.derived_from_op.inputs]
+                kind = dim.derived_from_op.kind
+                a = values[0]
+                for b in values[1:]:
+                    if kind == "add":
+                        a = numpy.maximum(a + b, 0)
+                    elif kind == "sub":
+                        a = numpy.maximum(a - b, 0)
+                    elif kind == "mul":
+                        a = a * b
+                    elif kind in ("floordiv", "truediv"):  # truediv assumes there is no remainder
+                        a = a // b
+                    elif kind == "ceildiv":
+                        a = -(-a // b)
+                    else:
+                        raise ValueError("unknown op kind %r" % kind)
+                gen_dims[dim] = a
+                continue
+
+            min_, max_ = seq_len_min_max[dim]
+            gen_dims[dim] = rnd.randint(min_, max_ + 1, size=[batch_size], dtype=numpy.int32)
+
+    return gen_dims

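The two new helpers are meant to be used as a pair: tensor_dict_dims_random_seq_len_min_max scans a TensorDict once, collects all dynamic non-batch dims (including dims reachable via derived_from_op), and resolves a (min, max) range for each dim that is not derived from others; get_random_seq_lens_for_dyn_dims then draws concrete per-batch lengths from those ranges and computes derived dims from their inputs (add, sub, mul, floordiv/truediv, ceildiv). A minimal sketch of how they compose (illustrative names and sizes, not from the diff):

    # Illustrative sketch only, not from the diff: prepare dims and ranges once,
    # then draw random seq lens for a batch from them.
    from returnn.tensor import Tensor, Dim, TensorDict, batch_dim
    from returnn.tensor.utils import (
        tensor_dict_dims_random_seq_len_min_max,
        get_random_seq_lens_for_dyn_dims,
    )

    time_dim = Dim(None, name="time")
    td = TensorDict()
    td.update({"data": Tensor("data", dims=[batch_dim, time_dim], dtype="float32")}, auto_convert=True)

    dims, seq_len_min_max = tensor_dict_dims_random_seq_len_min_max(td, seq_lens={time_dim: (5, 15)})
    # -> dims == [time_dim], seq_len_min_max == {time_dim: (5, 15)}
    seq_lens = get_random_seq_lens_for_dyn_dims(dims, seq_len_min_max, batch_size=3, rnd=1337)
    # -> seq_lens[time_dim] is a numpy int32 array of shape [3], values drawn from [5, 15] inclusive
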
{returnn-1.20240205.153348 → returnn-1.20240206.450/returnn.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: returnn
-Version: 1.20240205.153348
+Version: 1.20240206.450
 Summary: The RWTH extensible training framework for universal recurrent neural networks
 Home-page: https://github.com/rwth-i6/returnn/
 Author: Albert Zeyer

returnn-1.20240205.153348/_setup_info_generated.py (removed)
@@ -1,2 +0,0 @@
-version = '1.20240205.153348'
-long_version = '1.20240205.153348+git.6745251'

returnn-1.20240205.153348/returnn/tensor/utils.py (removed)
@@ -1,118 +0,0 @@
-"""
-Some helper utils.
-"""
-
-
-from __future__ import annotations
-from typing import Optional, Union, Dict
-import numpy
-from returnn.tensor import Tensor, Dim, TensorDict, batch_dim
-
-
-def tensor_dict_fill_random_numpy_(
-    tensor_dict: TensorDict,
-    *,
-    rnd: Union[int, numpy.random.RandomState] = 42,
-    dyn_dim_max_sizes: Optional[Dict[Dim, int]] = None,
-    dyn_dim_min_sizes: Optional[Dict[Dim, int]] = None,
-):
-    """
-    Random fill with NumPy arrays.
-
-    :param tensor_dict:
-    :param rnd:
-    :param dyn_dim_max_sizes: you can specify max sizes for dim tags with dynamic sizes.
-        The fill random code makes sure that there is at least one entry where we reach the max size,
-        so that the dim value will be the max size.
-    :param dyn_dim_min_sizes:
-    """
-    if not isinstance(rnd, numpy.random.RandomState):
-        rnd = numpy.random.RandomState(rnd)
-    for v in tensor_dict.data.values():
-        tensor_fill_random_numpy_(v, rnd=rnd, dyn_dim_max_sizes=dyn_dim_max_sizes, dyn_dim_min_sizes=dyn_dim_min_sizes)
-
-
-def tensor_fill_random_numpy_(
-    x: Tensor,
-    *,
-    min_val: int = 0,
-    max_val: Optional[int] = None,
-    rnd: numpy.random.RandomState,
-    dyn_dim_max_sizes: Optional[Dict[Dim, int]] = None,
-    dyn_dim_min_sizes: Optional[Dict[Dim, int]] = None,
-) -> bool:
-    """fill. return whether sth was filled"""
-    if dyn_dim_max_sizes is None:
-        dyn_dim_max_sizes = {}
-    if dyn_dim_min_sizes is None:
-        dyn_dim_min_sizes = {}
-    filled = False
-    while True:
-        have_unfilled = False
-        filled_this_round = False
-
-        for dim in x.dims:
-            if dim.is_batch_dim() and not dim.dyn_size_ext:
-                dim.dyn_size_ext = Tensor("batch", [], dtype="int32")
-            if dim.is_dynamic() and not dim.dyn_size_ext:
-                dim.dyn_size_ext = Tensor(dim.name or "time", dims=[batch_dim], dtype="int32")
-            if not dim.dyn_size_ext:
-                continue
-            if tensor_fill_random_numpy_(
-                dim.dyn_size_ext,
-                min_val=dyn_dim_min_sizes.get(dim, 2),
-                max_val=dyn_dim_max_sizes.get(dim, None),
-                rnd=rnd,
-                dyn_dim_max_sizes=dyn_dim_max_sizes,
-            ):
-                if dim in dyn_dim_max_sizes:
-                    # Make sure at least one of the dyn sizes matches the max size.
-                    i = rnd.randint(0, dim.dyn_size_ext.raw_tensor.size)
-                    dim.dyn_size_ext.raw_tensor.flat[i] = dyn_dim_max_sizes[dim]
-                    if dim in dyn_dim_min_sizes:
-                        j = rnd.randint(0, dim.dyn_size_ext.raw_tensor.size - 1)
-                        if j >= i:
-                            j += 1
-                        dim.dyn_size_ext.raw_tensor.flat[j] = dyn_dim_min_sizes[dim]
-                elif dim in dyn_dim_min_sizes:
-                    raise Exception(f"also define {dim} in dyn_dim_max_sizes, not just dyn_dim_min_sizes")
-                filled = True
-                filled_this_round = True
-            if dim.dyn_size_ext.raw_tensor is None:
-                have_unfilled = True
-            elif not isinstance(dim.dyn_size_ext.raw_tensor, numpy.ndarray):
-                have_unfilled = True
-
-        if have_unfilled:
-            assert filled_this_round, f"should have filled something, {x}"
-
-        if not have_unfilled:
-            break
-
-    if x.raw_tensor is not None:
-        if not isinstance(x.raw_tensor, numpy.ndarray):
-            x.raw_tensor = None
-
-    if x.raw_tensor is None:
-        shape = [d.get_dim_value() for d in x.dims]
-        if x.dtype.startswith("int"):
-            if max_val is None:
-                max_val = rnd.randint(5, 20)
-            if x.sparse_dim and x.sparse_dim.dimension is not None:
-                max_val = x.sparse_dim.dimension
-            x.raw_tensor = rnd.randint(min_val, max_val, size=shape, dtype=x.dtype)
-        elif x.dtype == "bool":
-            x.raw_tensor = rnd.randint(0, 2, size=shape, dtype=x.dtype)
-        elif x.dtype.startswith("float"):
-            x.raw_tensor = rnd.normal(0.0, 1.0, size=shape).astype(x.dtype)
-        elif x.dtype.startswith("complex"):
-            real = rnd.normal(0.0, 1.0, size=shape)
-            imag = rnd.normal(0.0, 1.0, size=shape)
-            x.raw_tensor = (real + 1j * imag).astype(x.dtype)
-        else:
-            raise NotImplementedError(f"not implemented for {x} dtype {x.dtype}")
-        filled = True
-
-    assert isinstance(x.raw_tensor, numpy.ndarray)
-
-    return filled