returnn 1.20240205.153348.tar.gz → 1.20240208.155745.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of returnn might be problematic.
- {returnn-1.20240205.153348/returnn.egg-info → returnn-1.20240208.155745}/PKG-INFO +1 -1
- returnn-1.20240208.155745/_setup_info_generated.py +2 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/__main__.py +2 -2
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/generating.py +13 -28
- returnn-1.20240208.155745/returnn/tensor/utils.py +237 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745/returnn.egg-info}/PKG-INFO +1 -1
- returnn-1.20240205.153348/_setup_info_generated.py +0 -2
- returnn-1.20240205.153348/returnn/tensor/utils.py +0 -118
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/.editorconfig +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/.gitignore +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/.gitmodules +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/.kateconfig +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/CHANGELOG.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/CODEOWNERS +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/CONTRIBUTING.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/LICENSE +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/MANIFEST.in +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/README.rst +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/12AX.cluster_map +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/_setup_returnn_env.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-fwd.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-horovod-mpi.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-horovod-mpi.py.sh +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-horovod-mpi.sh +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-hyper-param-tuning.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-iter-dataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-list-devices.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-lua-torch-layer.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-pretrain.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-record-and-push-to-webserver.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-returnn-as-framework.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-rf-pt-benchmark.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-rf.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-rhn-enwik8.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-sprint-interface.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-att-copy.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-attention.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-chunking-blstm.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-contribrnn-lstm.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-enc-dec.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-hard-att-copy.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-lstm-benchmark.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-maxgradnorm-lstm.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-native-lstm-lowmem.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-native-lstm.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-native-lstm2.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-native-lstm2.12ax.tuned.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-neural-transducer.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-rec-explicit-lstm.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-rec-explicit-rnn.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-rec-self-att.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-search-compiled-graph.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-vanilla-lstm.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-timit-lstm-ctc.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-torch.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-upd-mult-model.lstm.12ax.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo.sh +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/README.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/chars.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/config_demo +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/config_fwd +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/config_real +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/create_IAM_dataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/decode.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/features/raw/demo.h5 +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/go.sh +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/lines.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/split/eval.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/split/train.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/split/valid.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial/create_test_h5.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial/forwardconfig +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial/go.sh +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial/trainconfig +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial_rgb/create_test_h5.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial_rgb/forwardconfig +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial_rgb/go.sh +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial_rgb/trainconfig +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/pyproject.toml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/requirements.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/__old_mod_loader__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/__setup__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/config.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/audio.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/basic.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/bundle_file.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/cached.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/cached2.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/hdf.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/lm.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/map.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/meta.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/multi_proc.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/normalization_data.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/numpy_dump.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/raw_wav.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/sprint.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/stereo.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/util/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/util/feature_extraction.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/util/strings.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/datasets/util/vocabulary.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/engine/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/engine/base.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/engine/batch.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/__main__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/.git +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/.gitignore +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/LICENSE +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/aligner.gif +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/check.png +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/core.cu +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/core.h +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/core_cpu.cpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/LICENSE +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/MANIFEST.in +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/binding.cpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.cu +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.h +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/requirements.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/setup.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/test.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/ref_rna.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/setup.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op.cc +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op_kernel_tmpl.h +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/warp_rna/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/WarpRna/warp-rna/test.cpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/graph_editor/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/graph_editor/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/graph_editor/edit.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/graph_editor/reroute.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/graph_editor/select.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/graph_editor/subgraph.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/graph_editor/transform.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/extern/graph_editor/util.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/forward_iface.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_backend.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_native/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_native/backend.cpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_native/backend.hpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_native/module.cpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_native/module.hpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_native/py_utils.hpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_native/tensor_ops.cpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_native/tensor_ops.hpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_numpy_backend.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_random_journal.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/_utils.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/array_.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/attention.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/audio/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/audio/mel.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/audio/specaugment.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/backend.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/cond.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/const.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/container.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/control_flow_ctx.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/conv.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/decoder/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/decoder/transformer.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/device.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/dims.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/dropout.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/dtype.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/encoder/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/encoder/base.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/encoder/conformer.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/gradient.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/graph.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/hooks.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/init.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/label_smoothing.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/linear.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/loop.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/loss.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/math_.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/matmul.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/module.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/normalization.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/parameter.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/rand.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/rec.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/reduce.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/run_ctx.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/signal.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/state.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/tensor_array.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/frontend/types.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/import_/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/import_/common.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/import_/git.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/import_/import_.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/learning_rate_control.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/log.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/native_op.cpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/native_op.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/pretrain.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/sprint/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/sprint/cache.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/sprint/control.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/sprint/error_signals.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/sprint/extern_interface.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/sprint/interface.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/_dim_extra.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/_tensor_extra.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/_tensor_mixin_base.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/_tensor_op_overloads.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/control_flow_ctx.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/dim.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/marked_dim.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/tensor.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tensor/tensor_dict.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/compat.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/data_pipeline.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/distributed.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/engine.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/_backend.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/_utils.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/cond.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/config_entry_points.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/debug_eager_mode.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/dims.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/layer.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/loop.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/make_layer.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/masked_computation.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/parameter_assign.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_layers/prev_tensor_ref.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_low_level/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/frontend_low_level/_backend.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/horovod.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/hyper_param_tuning.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/layers/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/layers/base.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/layers/basic.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/layers/rec.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/layers/segmental_model.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/layers/signal_processing.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/layers/variable.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/native_op.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/network.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/sprint.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/updater.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/util/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/util/basic.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/util/data.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/util/gradient_checkpoint.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/util/ken_lm.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/tf/util/open_fst.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/data/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/data/extern_data.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/data/pipeline.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/data/queued_data_iter.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/data/returnn_dataset_wrapper.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/data/tensor_utils.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/distributed.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/engine.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/frontend/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/frontend/_backend.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/frontend/_rand.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/frontend/bridge.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/frontend/raw_ops.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/updater.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/util/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/util/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/util/diagnose_gpu.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/torch/util/scaled_gradient.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/__init__.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/basic.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/better_exchook.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/bpe.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/debug.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/debug_helpers.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/fsa.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/literal_py_to_pickle.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/math.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/multi_proc_non_daemonic_spawn.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/native_code_compiler.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/pprint.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/py-to-pickle.cpp +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/py_compat.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/py_ext_mod_compiler.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/result_with_reason.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/sig_proc.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/task_system.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/train_proc_manager.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn/util/watch_memory.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn.egg-info/SOURCES.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn.egg-info/dependency_links.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/returnn.egg-info/top_level.txt +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/rnn.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/setup.cfg +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/setup.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/DummySprintExec.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm-inspection-profile.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/.gitignore +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/.name +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/codeStyleSettings.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/codeStyles/Project.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/codeStyles/codeStyleConfig.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/inspectionProfiles/Project_Default.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/inspectionProfiles/profiles_settings.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/misc.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/modules.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/returnn.iml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/PyCharm.idea/scopes/scope_settings.xml +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/_set_num_threads1.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/_setup_returnn_env.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/_setup_test_env.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/bpe-unicode-demo.codes +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/bpe-unicode-demo.vocab +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/lexicon_opt.fst +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/lexicon_opt.isyms +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/lexicon_opt.jpg +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/lexicon_opt.osyms +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/lint_common.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/pycharm-inspect.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/pylint.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/returnn-as-framework.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/rf_utils.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/spelling.dic +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_Config.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_Dataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_Fsa.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_GeneratingDataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_HDFDataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_LearningRateControl.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_Log.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_MultiProcDataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_Pretrain.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_ResNet.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_SprintDataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_SprintInterface.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TFEngine.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TFNativeOp.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TFNetworkLayer.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TFNetworkRecLayer.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TFNetworkSigProcLayer.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TFUpdater.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TFUtil.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TF_determinism.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TaskSystem.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TaskSystem_SharedMem.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_TranslationDataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_Util.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_demos.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_fork_exec.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_hdf_dump.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_array.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_attention.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_base.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_cond.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_const.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_container.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_conv.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_encoder_conformer.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_gradient.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_label_smoothing.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_loop.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_math.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_normalization.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_rec.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_reduce.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_rf_signal.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_tensor.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_tools.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_torch_dataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_torch_engine.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_torch_frontend.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tests/test_torch_internal_frontend.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/_setup_returnn_env.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/analyze-dataset-batches.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/bliss-collect-seq-lens.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/bliss-dump-text.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/bliss-get-segment-names.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/bliss-to-ogg-zip.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/bpe-create-lexicon.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/calculate-word-error-rate.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/cleanup-old-models.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/collect-orth-symbols.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/collect-words.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/compile_native_op.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/compile_tf_graph.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/debug-dump-search-scores.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/debug-plot-search-scores.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/dump-dataset-raw-strings.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/dump-dataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/dump-forward-stats.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/dump-forward.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/dump-network-json.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/dump-pickle.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/extract_state_tying_from_dataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/get-attention-weights.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/get-best-model-epoch.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/hdf_dump.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/hdf_dump_translation_dataset.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/import-blocks-mt-model.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/import-t2t-mt-model.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/.gitignore +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/Makefile +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/example/README.md +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/example/libs_list +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.keep_over_epoch.lstm2.config +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/example/rescore_lattice.sh +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/example/state_vars_list +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/example/tensor_names_list +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/file.h +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/htklatticerescorer.cc +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/htklatticerescorer.h +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/main.cc +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/rescorer.h +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/vocabulary.cc +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/lattice_rescorer/vocabulary.h +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/tf_avg_checkpoints.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/tf_inspect_checkpoint.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/tf_inspect_summary_log.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/torch_avg_checkpoints.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/torch_export_to_onnx.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/torch_inspect_checkpoint.py +0 -0
- {returnn-1.20240205.153348 → returnn-1.20240208.155745}/tools/torch_inspect_checkpoint_and_opt.py +0 -0
returnn/__main__.py:

@@ -737,8 +737,8 @@ def main(argv=None):
         execute_main_task()
     except KeyboardInterrupt:
         return_code = 1
-        print("KeyboardInterrupt", file=getattr(log, "v3", sys.stderr)
-        if getattr(log, "verbose",
+        print("KeyboardInterrupt", file=getattr(log, "v3", None) or sys.stderr)
+        if getattr(log, "verbose", None) and log.verbose[5]:
             sys.excepthook(*sys.exc_info())
     finalize(error_occurred=return_code != 0)
     if return_code:
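The `__main__.py` change hardens the KeyboardInterrupt handler for the case where the `log` module exists but was never initialized. A minimal sketch of the difference between the two fallback styles (hypothetical `_Log` stand-in, not RETURNN code):

```python
# getattr(obj, "attr", default) only uses the default when the attribute is
# *missing*, not when it exists but is None (e.g. an uninitialized log stream).
import sys


class _Log:
    v3 = None       # log stream not initialized yet
    verbose = None  # verbosity list not initialized yet


log = _Log()

old_target = getattr(log, "v3", sys.stderr)           # -> None (attribute exists)
new_target = getattr(log, "v3", None) or sys.stderr   # -> sys.stderr

assert old_target is None and new_target is sys.stderr

# Likewise, `getattr(log, "verbose", None) and log.verbose[5]` short-circuits
# instead of indexing into a verbosity list that was never set up.
```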
returnn/datasets/generating.py:

@@ -11,7 +11,7 @@ import typing
 
 from returnn.util.basic import class_idx_seq_to_1_of_k, CollectionReadCheckCovered
 from returnn.log import log
-from returnn.tensor import Tensor, TensorDict
+from returnn.tensor import Tensor, Dim, TensorDict
 
 from .util.feature_extraction import ExtractAudioFeatures
 from .util.vocabulary import *
@@ -967,14 +967,16 @@ class DummyGenericDataset(GeneratingDataset):
         data_template: Union[TensorDict, Dict[str, Union[Tensor, Dict[str, Any]]]],
         num_seqs: int,
         *,
-        seq_lens:
+        seq_lens: Union[None, int, Tuple[int, int], Dict[Union[str, Dim, None], Union[int, Tuple[int, int]]]] = None,
         **kwargs,
     ):
         """
         :param data_template: describes each tensor
         :param num_seqs:
-        :param seq_lens: either fixed seq len, or take randint. per data key, or same for all
+        :param seq_lens: either fixed seq len, or take randint. per data key, or per dim, or same for all
         """
+        from returnn.tensor.utils import tensor_dict_dims_random_seq_len_min_max
+
         data_template_ = TensorDict()
         data_template_.update(data_template, auto_convert=True)
         data_template = data_template_
@@ -982,19 +984,8 @@ class DummyGenericDataset(GeneratingDataset):
         old_style_dims = {k: (v.dim, v.ndim) for k, v in data_template.data.items()}
         super().__init__(input_dim=None, output_dim=old_style_dims, num_seqs=num_seqs, **kwargs)
         self.data_template = data_template
-
-
-        elif not isinstance(seq_lens, dict):
-            seq_lens = {k: seq_lens for k in data_template.data.keys()}
-        seq_lens = dict(seq_lens)
-        for k, v in data_template.data.items():
-            if k not in seq_lens:
-                if v.shape in {(None,), (None, 1)} and v.dtype.startswith("float"):
-                    # Assume raw audio data samples, take longer seq lens by default, assume 16khz.
-                    seq_lens[k] = (1_000, 8_000)
-                else:
-                    seq_lens[k] = (5, 15)
-        self.seq_lens: Dict[str, Union[int, Tuple[int, int]]] = seq_lens
+        self.seq_lens = seq_lens
+        self._dyn_dims, self._dyn_lens_min_max = tensor_dict_dims_random_seq_len_min_max(data_template, seq_lens)
 
     def get_data_keys(self) -> List[str]:
         """data keys"""
@@ -1028,19 +1019,13 @@ class DummyGenericDataset(GeneratingDataset):
 
     def _generate_data(self, key: str) -> numpy.ndarray:
         """generate for specific data key. assumes that self.random is in a correct state"""
+        from returnn.tensor.utils import get_random_seq_lens_for_dyn_dims
+
+        seq_lens = get_random_seq_lens_for_dyn_dims(self._dyn_dims, self._dyn_lens_min_max, rnd=self.random)
         templ: Tensor = self.data_template.data[key]
-        shape =
-
-
-        seq_len_gen = self.seq_lens.get(key, (5, 15))
-        if isinstance(seq_len_gen, int):
-            seq_len = seq_len_gen
-        elif isinstance(seq_len_gen, tuple):
-            assert len(seq_len_gen) == 2  # min and max
-            seq_len = self.random.randint(*seq_len_gen)
-        else:
-            raise TypeError(f"{self} generate: data key {key!r} seq_len {seq_len_gen!r} invalid")
-        shape[axis] = seq_len
+        shape = [
+            seq_lens[dim][0] if dim.is_dynamic() else dim.dimension for dim in templ.dims if not dim.is_batch_dim()
+        ]
         if templ.sparse_dim:
             return self.random.randint(0, templ.sparse_dim.dimension, shape, dtype=templ.dtype)
         if templ.dtype.startswith("float"):
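Taken together, the `generating.py` changes let `DummyGenericDataset` resolve sequence-length specs per data key, per dim tag, or globally, delegating the bookkeeping to the new `returnn.tensor.utils` helpers. A hedged usage sketch (not part of the diff; `Dim`/`Tensor` construction follows the usual RETURNN API, exact keyword arguments may differ):

```python
from returnn.tensor import Tensor, Dim, batch_dim
from returnn.datasets.generating import DummyGenericDataset

time_dim = Dim(None, name="time")  # dynamic (per-sequence) dim
feat_dim = Dim(40, name="feat")    # static feature dim

dataset = DummyGenericDataset(
    data_template={
        "data": Tensor("data", dims=[batch_dim, time_dim, feat_dim], dtype="float32"),
        "classes": Tensor("classes", dims=[batch_dim, time_dim], dtype="int32", sparse_dim=Dim(10, name="classes")),
    },
    num_seqs=3,
    # seq_lens can now be keyed by data key (str), by Dim, or by None (applies to all).
    seq_lens={time_dim: (5, 15)},
)
dataset.init_seq_order(epoch=1)
```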
@@ -0,0 +1,237 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Some helper utils.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
from typing import Optional, Union, Sequence, Dict, List, Tuple
|
|
7
|
+
import numpy
|
|
8
|
+
from returnn.tensor import Tensor, Dim, TensorDict, batch_dim
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def tensor_dict_fill_random_numpy_(
|
|
12
|
+
tensor_dict: TensorDict,
|
|
13
|
+
*,
|
|
14
|
+
rnd: Union[int, numpy.random.RandomState] = 42,
|
|
15
|
+
dyn_dim_max_sizes: Optional[Dict[Dim, int]] = None,
|
|
16
|
+
dyn_dim_min_sizes: Optional[Dict[Dim, int]] = None,
|
|
17
|
+
):
|
|
18
|
+
"""
|
|
19
|
+
Random fill with NumPy arrays.
|
|
20
|
+
|
|
21
|
+
:param tensor_dict:
|
|
22
|
+
:param rnd:
|
|
23
|
+
:param dyn_dim_max_sizes: you can specify max sizes for dim tags with dynamic sizes.
|
|
24
|
+
The fill random code makes sure that there is at least one entry where we reach the max size,
|
|
25
|
+
so that the dim value will be the max size.
|
|
26
|
+
:param dyn_dim_min_sizes:
|
|
27
|
+
"""
|
|
28
|
+
if not isinstance(rnd, numpy.random.RandomState):
|
|
29
|
+
rnd = numpy.random.RandomState(rnd)
|
|
30
|
+
for v in tensor_dict.data.values():
|
|
31
|
+
tensor_fill_random_numpy_(v, rnd=rnd, dyn_dim_max_sizes=dyn_dim_max_sizes, dyn_dim_min_sizes=dyn_dim_min_sizes)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def tensor_fill_random_numpy_(
|
|
35
|
+
x: Tensor,
|
|
36
|
+
*,
|
|
37
|
+
min_val: int = 0,
|
|
38
|
+
max_val: Optional[int] = None,
|
|
39
|
+
rnd: numpy.random.RandomState,
|
|
40
|
+
dyn_dim_max_sizes: Optional[Dict[Dim, int]] = None,
|
|
41
|
+
dyn_dim_min_sizes: Optional[Dict[Dim, int]] = None,
|
|
42
|
+
) -> bool:
|
|
43
|
+
"""fill. return whether sth was filled"""
|
|
44
|
+
if dyn_dim_max_sizes is None:
|
|
45
|
+
dyn_dim_max_sizes = {}
|
|
46
|
+
if dyn_dim_min_sizes is None:
|
|
47
|
+
dyn_dim_min_sizes = {}
|
|
48
|
+
filled = False
|
|
49
|
+
while True:
|
|
50
|
+
have_unfilled = False
|
|
51
|
+
filled_this_round = False
|
|
52
|
+
|
|
53
|
+
for dim in x.dims:
|
|
54
|
+
if dim.is_batch_dim() and not dim.dyn_size_ext:
|
|
55
|
+
dim.dyn_size_ext = Tensor("batch", [], dtype="int32")
|
|
56
|
+
if dim.is_dynamic() and not dim.dyn_size_ext:
|
|
57
|
+
dim.dyn_size_ext = Tensor(dim.name or "time", dims=[batch_dim], dtype="int32")
|
|
58
|
+
if not dim.dyn_size_ext:
|
|
59
|
+
continue
|
|
60
|
+
if tensor_fill_random_numpy_(
|
|
61
|
+
dim.dyn_size_ext,
|
|
62
|
+
min_val=dyn_dim_min_sizes.get(dim, 2),
|
|
63
|
+
max_val=dyn_dim_max_sizes.get(dim, None),
|
|
64
|
+
rnd=rnd,
|
|
65
|
+
dyn_dim_max_sizes=dyn_dim_max_sizes,
|
|
66
|
+
):
|
|
67
|
+
if dim in dyn_dim_max_sizes:
|
|
68
|
+
# Make sure at least one of the dyn sizes matches the max size.
|
|
69
|
+
i = rnd.randint(0, dim.dyn_size_ext.raw_tensor.size)
|
|
70
|
+
dim.dyn_size_ext.raw_tensor.flat[i] = dyn_dim_max_sizes[dim]
|
|
71
|
+
if dim in dyn_dim_min_sizes:
|
|
72
|
+
j = rnd.randint(0, dim.dyn_size_ext.raw_tensor.size - 1)
|
|
73
|
+
if j >= i:
|
|
74
|
+
j += 1
|
|
75
|
+
dim.dyn_size_ext.raw_tensor.flat[j] = dyn_dim_min_sizes[dim]
|
|
76
|
+
elif dim in dyn_dim_min_sizes:
|
|
77
|
+
raise Exception(f"also define {dim} in dyn_dim_max_sizes, not just dyn_dim_min_sizes")
|
|
78
|
+
filled = True
|
|
79
|
+
filled_this_round = True
|
|
80
|
+
if dim.dyn_size_ext.raw_tensor is None:
|
|
81
|
+
have_unfilled = True
|
|
82
|
+
elif not isinstance(dim.dyn_size_ext.raw_tensor, numpy.ndarray):
|
|
83
|
+
have_unfilled = True
|
|
84
|
+
|
|
85
|
+
if have_unfilled:
|
|
86
|
+
assert filled_this_round, f"should have filled something, {x}"
|
|
87
|
+
|
|
88
|
+
if not have_unfilled:
|
|
89
|
+
break
|
|
90
|
+
|
|
91
|
+
if x.raw_tensor is not None:
|
|
92
|
+
if not isinstance(x.raw_tensor, numpy.ndarray):
|
|
93
|
+
x.raw_tensor = None
|
|
94
|
+
|
|
95
|
+
if x.raw_tensor is None:
|
|
96
|
+
shape = [d.get_dim_value() for d in x.dims]
|
|
97
|
+
if x.dtype.startswith("int"):
|
|
98
|
+
if max_val is None:
|
|
99
|
+
max_val = rnd.randint(5, 20)
|
|
100
|
+
if x.sparse_dim and x.sparse_dim.dimension is not None:
|
|
101
|
+
max_val = x.sparse_dim.dimension
|
|
102
|
+
x.raw_tensor = rnd.randint(min_val, max_val, size=shape, dtype=x.dtype)
|
|
103
|
+
elif x.dtype == "bool":
|
|
104
|
+
x.raw_tensor = rnd.randint(0, 2, size=shape, dtype=x.dtype)
|
|
105
|
+
elif x.dtype.startswith("float"):
|
|
106
|
+
x.raw_tensor = rnd.normal(0.0, 1.0, size=shape).astype(x.dtype)
|
|
107
|
+
elif x.dtype.startswith("complex"):
|
|
108
|
+
real = rnd.normal(0.0, 1.0, size=shape)
|
|
109
|
+
imag = rnd.normal(0.0, 1.0, size=shape)
|
|
110
|
+
x.raw_tensor = (real + 1j * imag).astype(x.dtype)
|
|
111
|
+
else:
|
|
112
|
+
raise NotImplementedError(f"not implemented for {x} dtype {x.dtype}")
|
|
113
|
+
filled = True
|
|
114
|
+
|
|
115
|
+
assert isinstance(x.raw_tensor, numpy.ndarray)
|
|
116
|
+
|
|
117
|
+
return filled
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def tensor_dict_dims_random_seq_len_min_max(
|
|
121
|
+
tensor_dict: TensorDict,
|
|
122
|
+
seq_lens: Union[None, int, Tuple[int, int], Dict[Union[str, Dim, None], Union[int, Tuple[int, int]]]] = None,
|
|
123
|
+
) -> Tuple[List[Dim], Dict[Dim, Tuple[int, int]]]:
|
|
124
|
+
"""
|
|
125
|
+
This is specifically intended to prepare the list of all dynamic dims from the tensor dict
|
|
126
|
+
and the seq_len_min_max for :func:`get_random_seq_lens_for_dyn_dims`.
|
|
127
|
+
|
|
128
|
+
:param tensor_dict:
|
|
129
|
+
:param seq_lens: either fixed seq len, or take randint. per data key, or per dim, or same for all
|
|
130
|
+
:return: dims, seq_len_min_max
|
|
131
|
+
"""
|
|
132
|
+
if seq_lens is None:
|
|
133
|
+
seq_lens = {}
|
|
134
|
+
if not isinstance(seq_lens, dict):
|
|
135
|
+
seq_lens = {None: seq_lens}
|
|
136
|
+
seq_lens: Dict[Union[str, Dim, None], Union[int, Tuple[int, int]]]
|
|
137
|
+
|
|
138
|
+
# Collect all dyn dim tags, including derived_from_op ones.
|
|
139
|
+
# The order will be sorted such that derived_from_op roots come first.
|
|
140
|
+
visited_dims = set()
|
|
141
|
+
dims = []
|
|
142
|
+
seq_len_min_max = {} # Also collect seq_len_min_max.
|
|
143
|
+
for k, v in tensor_dict.data.items():
|
|
144
|
+
for dim in v.dims:
|
|
145
|
+
if dim.is_dynamic() and dim not in visited_dims and not dim.is_batch_dim():
|
|
146
|
+
queue = [dim]
|
|
147
|
+
offset = len(dims)
|
|
148
|
+
while queue:
|
|
149
|
+
dim = queue.pop(0)
|
|
150
|
+
if not dim.is_dynamic():
|
|
151
|
+
continue
|
|
152
|
+
if dim in visited_dims:
|
|
153
|
+
continue
|
|
154
|
+
visited_dims.add(dim)
|
|
155
|
+
dims.insert(offset, dim)
|
|
156
|
+
dim.reset_batch_and_raw()
|
|
157
|
+
if dim.derived_from_op:
|
|
158
|
+
queue.extend(dim.derived_from_op.inputs)
|
|
159
|
+
else:
|
|
160
|
+
# Need to specify seq_len_min_max.
|
|
161
|
+
if dim in seq_lens or k in seq_lens or None in seq_lens:
|
|
162
|
+
if dim in seq_lens:
|
|
163
|
+
size = seq_lens[dim]
|
|
164
|
+
elif k in seq_lens:
|
|
165
|
+
size = seq_lens[k]
|
|
166
|
+
else:
|
|
167
|
+
size = seq_lens[None]
|
|
168
|
+
if isinstance(size, int):
|
|
169
|
+
size = (size, size)
|
|
170
|
+
else:
|
|
171
|
+
assert (
|
|
172
|
+
isinstance(size, tuple)
|
|
173
|
+
and len(size) == 2
|
|
174
|
+
and all(isinstance(s, int) for s in size)
|
|
175
|
+
and 0 <= size[0] <= size[1]
|
|
176
|
+
), f"invalid size {size!r} in seq lens {seq_lens}"
|
|
177
|
+
else:
|
|
178
|
+
if v.shape in {(None,), (None, 1)} and v.dtype.startswith("float"):
|
|
179
|
+
# Assume raw audio data samples, take longer seq lens by default, assume 16khz.
|
|
180
|
+
size = (1_000, 8_000)
|
|
181
|
+
else:
|
|
182
|
+
size = (5, 15)
|
|
183
|
+
seq_len_min_max[dim] = size
|
|
184
|
+
|
|
185
|
+
return dims, seq_len_min_max
|
|
186
|
+
|
|
187
|
+
|
|
+def get_random_seq_lens_for_dyn_dims(
+    dims: Sequence[Dim],
+    seq_len_min_max: Dict[Dim, Tuple[int, int]],
+    *,
+    batch_size: int = 1,
+    rnd: Union[int, numpy.random.RandomState] = 1337,
+) -> Dict[Dim, numpy.ndarray]:
+    """
+    Make random seq lens for dims.
+
+    Note that dim tags are not actually modified here,
+    as we need to have this in a safe way,
+    which might run in parallel to the main thread.
+
+    :param dims: Note that the order matters, as we use complete_dyn_size() (or equivalent).
+    :param seq_len_min_max:
+    :param batch_size:
+    :param rnd:
+    """
+    if not isinstance(rnd, numpy.random.RandomState):
+        rnd = numpy.random.RandomState(rnd)
+
+    gen_dims = {}
+    for dim in dims:
+        if dim not in gen_dims:
+            if dim.derived_from_op:
+                # If we get a KeyError for the following, the order of dims is invalid.
+                values = [gen_dims[dim_] for dim_ in dim.derived_from_op.inputs]
+                kind = dim.derived_from_op.kind
+                a = values[0]
+                for b in values[1:]:
+                    if kind == "add":
+                        a = numpy.maximum(a + b, 0)
+                    elif kind == "sub":
+                        a = numpy.maximum(a - b, 0)
+                    elif kind == "mul":
+                        a = a * b
+                    elif kind in ("floordiv", "truediv"):  # truediv assumes there is no remainder
+                        a = a // b
+                    elif kind == "ceildiv":
+                        a = -(-a // b)
+                    else:
+                        raise ValueError("unknown op kind %r" % kind)
+                gen_dims[dim] = a
+                continue
+
+            min_, max_ = seq_len_min_max[dim]
+            gen_dims[dim] = rnd.randint(min_, max_ + 1, size=[batch_size], dtype=numpy.int32)
+
+    return gen_dims
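As a usage sketch for the function above, restricted to plain NumPy (no real `Dim` objects; the base/derived roles and the divisor of 6 are made-up examples): a base time dim gets random lengths from its `seq_len_min_max` entry, and a dim derived from it with kind "ceildiv" is computed with the same formula as in the loop above.

import numpy

rnd = numpy.random.RandomState(1337)
batch_size = 3

# Base dim: random lengths drawn from its (min, max) entry, as in the last branch above.
min_, max_ = 5, 15
time_lens = rnd.randint(min_, max_ + 1, size=[batch_size], dtype=numpy.int32)

# Derived dim with kind == "ceildiv" and a (hypothetical) constant divisor of 6.
factor = numpy.array(6, dtype=numpy.int32)
downsampled_lens = -(-time_lens // factor)  # ceildiv, same formula as above

print(time_lens, downsampled_lens)  # per-sequence lengths and their downsampled counterparts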
returnn-1.20240205.153348/returnn/tensor/utils.py (old version, removed; superseded by the new returnn/tensor/utils.py above)

@@ -1,118 +0,0 @@
-"""
-Some helper utils.
-"""
-
-
-from __future__ import annotations
-from typing import Optional, Union, Dict
-import numpy
-from returnn.tensor import Tensor, Dim, TensorDict, batch_dim
-
-
-def tensor_dict_fill_random_numpy_(
-    tensor_dict: TensorDict,
-    *,
-    rnd: Union[int, numpy.random.RandomState] = 42,
-    dyn_dim_max_sizes: Optional[Dict[Dim, int]] = None,
-    dyn_dim_min_sizes: Optional[Dict[Dim, int]] = None,
-):
-    """
-    Random fill with NumPy arrays.
-
-    :param tensor_dict:
-    :param rnd:
-    :param dyn_dim_max_sizes: you can specify max sizes for dim tags with dynamic sizes.
-        The fill random code makes sure that there is at least one entry where we reach the max size,
-        so that the dim value will be the max size.
-    :param dyn_dim_min_sizes:
-    """
-    if not isinstance(rnd, numpy.random.RandomState):
-        rnd = numpy.random.RandomState(rnd)
-    for v in tensor_dict.data.values():
-        tensor_fill_random_numpy_(v, rnd=rnd, dyn_dim_max_sizes=dyn_dim_max_sizes, dyn_dim_min_sizes=dyn_dim_min_sizes)
-
-
-def tensor_fill_random_numpy_(
-    x: Tensor,
-    *,
-    min_val: int = 0,
-    max_val: Optional[int] = None,
-    rnd: numpy.random.RandomState,
-    dyn_dim_max_sizes: Optional[Dict[Dim, int]] = None,
-    dyn_dim_min_sizes: Optional[Dict[Dim, int]] = None,
-) -> bool:
-    """fill. return whether sth was filled"""
-    if dyn_dim_max_sizes is None:
-        dyn_dim_max_sizes = {}
-    if dyn_dim_min_sizes is None:
-        dyn_dim_min_sizes = {}
-    filled = False
-    while True:
-        have_unfilled = False
-        filled_this_round = False
-
-        for dim in x.dims:
-            if dim.is_batch_dim() and not dim.dyn_size_ext:
-                dim.dyn_size_ext = Tensor("batch", [], dtype="int32")
-            if dim.is_dynamic() and not dim.dyn_size_ext:
-                dim.dyn_size_ext = Tensor(dim.name or "time", dims=[batch_dim], dtype="int32")
-            if not dim.dyn_size_ext:
-                continue
-            if tensor_fill_random_numpy_(
-                dim.dyn_size_ext,
-                min_val=dyn_dim_min_sizes.get(dim, 2),
-                max_val=dyn_dim_max_sizes.get(dim, None),
-                rnd=rnd,
-                dyn_dim_max_sizes=dyn_dim_max_sizes,
-            ):
-                if dim in dyn_dim_max_sizes:
-                    # Make sure at least one of the dyn sizes matches the max size.
-                    i = rnd.randint(0, dim.dyn_size_ext.raw_tensor.size)
-                    dim.dyn_size_ext.raw_tensor.flat[i] = dyn_dim_max_sizes[dim]
-                    if dim in dyn_dim_min_sizes:
-                        j = rnd.randint(0, dim.dyn_size_ext.raw_tensor.size - 1)
-                        if j >= i:
-                            j += 1
-                        dim.dyn_size_ext.raw_tensor.flat[j] = dyn_dim_min_sizes[dim]
-                elif dim in dyn_dim_min_sizes:
-                    raise Exception(f"also define {dim} in dyn_dim_max_sizes, not just dyn_dim_min_sizes")
-                filled = True
-                filled_this_round = True
-            if dim.dyn_size_ext.raw_tensor is None:
-                have_unfilled = True
-            elif not isinstance(dim.dyn_size_ext.raw_tensor, numpy.ndarray):
-                have_unfilled = True
-
-        if have_unfilled:
-            assert filled_this_round, f"should have filled something, {x}"
-
-        if not have_unfilled:
-            break
-
-    if x.raw_tensor is not None:
-        if not isinstance(x.raw_tensor, numpy.ndarray):
-            x.raw_tensor = None
-
-    if x.raw_tensor is None:
-        shape = [d.get_dim_value() for d in x.dims]
-        if x.dtype.startswith("int"):
-            if max_val is None:
-                max_val = rnd.randint(5, 20)
-            if x.sparse_dim and x.sparse_dim.dimension is not None:
-                max_val = x.sparse_dim.dimension
-            x.raw_tensor = rnd.randint(min_val, max_val, size=shape, dtype=x.dtype)
-        elif x.dtype == "bool":
-            x.raw_tensor = rnd.randint(0, 2, size=shape, dtype=x.dtype)
-        elif x.dtype.startswith("float"):
-            x.raw_tensor = rnd.normal(0.0, 1.0, size=shape).astype(x.dtype)
-        elif x.dtype.startswith("complex"):
-            real = rnd.normal(0.0, 1.0, size=shape)
-            imag = rnd.normal(0.0, 1.0, size=shape)
-            x.raw_tensor = (real + 1j * imag).astype(x.dtype)
-        else:
-            raise NotImplementedError(f"not implemented for {x} dtype {x.dtype}")
-        filled = True
-
-    assert isinstance(x.raw_tensor, numpy.ndarray)
-
-    return filled
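The removed tensor_fill_random_numpy_ guaranteed that, when dyn_dim_max_sizes is given for a dim, at least one batch entry hits exactly that max (so the dim value equals the max), and optionally a different entry hits the requested min. A NumPy-only sketch of that index trick, with made-up sizes:

import numpy

rnd = numpy.random.RandomState(42)
batch_size, min_size, max_size = 5, 2, 11

# Random per-sequence lengths, as the recursive fill of dyn_size_ext would produce.
dyn_sizes = rnd.randint(2, max_size, size=[batch_size], dtype=numpy.int32)

# Force one entry to the max size so the dim value equals max_size ...
i = rnd.randint(0, dyn_sizes.size)
dyn_sizes.flat[i] = max_size

# ... and a different entry to the min size (same index-shift trick as in the old code).
j = rnd.randint(0, dyn_sizes.size - 1)
if j >= i:
    j += 1
dyn_sizes.flat[j] = min_size

assert dyn_sizes.max() == max_size and dyn_sizes.min() == min_size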
RENAMED, file without changes:

{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-hyper-param-tuning.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-record-and-push-to-webserver.py
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-chunking-blstm.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-contribrnn-lstm.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-maxgradnorm-lstm.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-native-lstm-lowmem.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-native-lstm.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-native-lstm2.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-native-lstm2.12ax.tuned.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-neural-transducer.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-rec-explicit-lstm.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-rec-explicit-rnn.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-search-compiled-graph.py
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-tf-vanilla-lstm.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/demo-upd-mult-model.lstm.12ax.config
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/create_IAM_dataset.py
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/IAM/features/raw/demo.h5
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial/create_test_h5.py
{returnn-1.20240205.153348 → returnn-1.20240208.155745}/demos/mdlstm/artificial/forwardconfig