returnn 1.20231220.31131__tar.gz → 1.20231220.85618__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of returnn has been flagged as potentially problematic; see the registry listing for details.
- {returnn-1.20231220.31131/returnn.egg-info → returnn-1.20231220.85618}/PKG-INFO +1 -1
- returnn-1.20231220.85618/_setup_info_generated.py +2 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/attention.py +5 -0
- returnn-1.20231220.85618/returnn/frontend/decoder/__init__.py +3 -0
- returnn-1.20231220.85618/returnn/frontend/decoder/transformer.py +281 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618/returnn.egg-info}/PKG-INFO +1 -1
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn.egg-info/SOURCES.txt +2 -0
- returnn-1.20231220.31131/_setup_info_generated.py +0 -2
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/.editorconfig +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/.gitignore +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/.gitmodules +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/.kateconfig +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/CHANGELOG.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/CODEOWNERS +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/CONTRIBUTING.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/LICENSE +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/MANIFEST.in +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/README.rst +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/12AX.cluster_map +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/_setup_returnn_env.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-fwd.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-horovod-mpi.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-horovod-mpi.py.sh +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-horovod-mpi.sh +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-hyper-param-tuning.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-iter-dataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-list-devices.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-lua-torch-layer.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-pretrain.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-record-and-push-to-webserver.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-returnn-as-framework.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-rf-pt-benchmark.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-rf.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-rhn-enwik8.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-sprint-interface.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-att-copy.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-attention.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-chunking-blstm.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-contribrnn-lstm.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-enc-dec.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-hard-att-copy.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-lstm-benchmark.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-maxgradnorm-lstm.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-native-lstm-lowmem.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-native-lstm.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-native-lstm2.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-native-lstm2.12ax.tuned.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-neural-transducer.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-rec-explicit-lstm.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-rec-explicit-rnn.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-rec-self-att.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-search-compiled-graph.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-tf-vanilla-lstm.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-timit-lstm-ctc.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-torch.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo-upd-mult-model.lstm.12ax.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/demo.sh +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/README.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/chars.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/config_demo +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/config_fwd +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/config_real +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/create_IAM_dataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/decode.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/features/raw/demo.h5 +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/go.sh +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/lines.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/split/eval.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/split/train.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/IAM/split/valid.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/artificial/create_test_h5.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/artificial/forwardconfig +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/artificial/go.sh +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/artificial/trainconfig +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/artificial_rgb/create_test_h5.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/artificial_rgb/forwardconfig +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/artificial_rgb/go.sh +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/demos/mdlstm/artificial_rgb/trainconfig +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/pyproject.toml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/requirements.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/__main__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/__old_mod_loader__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/__setup__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/config.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/audio.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/basic.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/bundle_file.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/cached.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/cached2.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/generating.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/hdf.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/lm.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/map.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/meta.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/multi_proc.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/normalization_data.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/numpy_dump.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/raw_wav.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/sprint.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/stereo.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/util/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/util/feature_extraction.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/util/strings.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/datasets/util/vocabulary.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/engine/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/engine/base.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/engine/batch.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/__main__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/.git +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/.gitignore +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/LICENSE +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/aligner.gif +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/check.png +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/core.cu +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/core.h +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/core_cpu.cpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/LICENSE +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/MANIFEST.in +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/binding.cpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.cu +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.h +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/requirements.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/setup.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/test.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/ref_rna.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/setup.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op.cc +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op_kernel_tmpl.h +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/warp_rna/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/WarpRna/warp-rna/test.cpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/graph_editor/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/graph_editor/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/graph_editor/edit.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/graph_editor/reroute.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/graph_editor/select.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/graph_editor/subgraph.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/graph_editor/transform.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/extern/graph_editor/util.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/forward_iface.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_backend.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_native/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_native/backend.cpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_native/backend.hpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_native/module.cpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_native/module.hpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_native/py_utils.hpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_native/tensor_ops.cpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_native/tensor_ops.hpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_numpy_backend.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_random_journal.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/_utils.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/array_.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/audio/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/audio/mel.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/audio/specaugment.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/backend.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/cond.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/const.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/container.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/control_flow_ctx.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/conv.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/device.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/dims.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/dropout.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/dtype.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/encoder/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/encoder/base.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/encoder/conformer.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/gradient.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/graph.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/init.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/label_smoothing.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/linear.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/loop.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/loss.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/math_.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/matmul.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/module.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/normalization.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/parameter.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/rand.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/rec.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/reduce.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/run_ctx.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/signal.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/state.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/tensor_array.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/types.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/import_/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/import_/common.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/import_/git.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/import_/import_.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/learning_rate_control.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/log.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/native_op.cpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/native_op.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/pretrain.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/sprint/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/sprint/cache.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/sprint/control.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/sprint/error_signals.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/sprint/extern_interface.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/sprint/interface.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/_dim_extra.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/_tensor_extra.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/_tensor_mixin_base.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/_tensor_op_overloads.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/control_flow_ctx.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/dim.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/marked_dim.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/tensor.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/tensor_dict.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tensor/utils.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/compat.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/data_pipeline.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/distributed.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/engine.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/_backend.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/_utils.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/cond.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/config_entry_points.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/debug_eager_mode.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/dims.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/layer.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/loop.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/make_layer.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/masked_computation.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/parameter_assign.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_layers/prev_tensor_ref.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_low_level/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/frontend_low_level/_backend.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/horovod.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/hyper_param_tuning.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/layers/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/layers/base.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/layers/basic.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/layers/rec.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/layers/segmental_model.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/layers/signal_processing.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/layers/variable.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/native_op.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/network.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/sprint.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/updater.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/util/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/util/basic.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/util/data.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/util/gradient_checkpoint.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/util/ken_lm.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/tf/util/open_fst.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/data/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/data/extern_data.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/data/pipeline.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/data/returnn_dataset_wrapper.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/data/tensor_utils.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/distributed.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/engine.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/frontend/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/frontend/_backend.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/frontend/_rand.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/frontend/bridge.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/frontend/raw_ops.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/updater.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/util/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/util/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/util/diagnose_gpu.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/torch/util/scaled_gradient.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/__init__.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/basic.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/better_exchook.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/bpe.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/debug.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/debug_helpers.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/fsa.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/literal_py_to_pickle.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/math.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/native_code_compiler.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/pprint.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/py-to-pickle.cpp +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/py_compat.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/py_ext_mod_compiler.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/result_with_reason.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/sig_proc.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/task_system.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/util/watch_memory.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn.egg-info/dependency_links.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn.egg-info/top_level.txt +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/rnn.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/setup.cfg +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/setup.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/DummySprintExec.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm-inspection-profile.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/.gitignore +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/.name +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/codeStyleSettings.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/codeStyles/Project.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/codeStyles/codeStyleConfig.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/inspectionProfiles/Project_Default.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/inspectionProfiles/profiles_settings.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/misc.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/modules.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/returnn.iml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/PyCharm.idea/scopes/scope_settings.xml +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/_set_num_threads1.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/_setup_returnn_env.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/_setup_test_env.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/bpe-unicode-demo.codes +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/bpe-unicode-demo.vocab +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/lexicon_opt.fst +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/lexicon_opt.isyms +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/lexicon_opt.jpg +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/lexicon_opt.osyms +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/lint_common.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/pycharm-inspect.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/pylint.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/returnn-as-framework.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/rf_utils.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/spelling.dic +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_Config.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_Dataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_Fsa.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_GeneratingDataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_HDFDataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_LearningRateControl.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_Log.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_MultiProcDataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_Pretrain.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_ResNet.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_SprintDataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_SprintInterface.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TFEngine.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TFNativeOp.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TFNetworkLayer.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TFNetworkRecLayer.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TFNetworkSigProcLayer.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TFUpdater.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TFUtil.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TF_determinism.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TaskSystem.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TaskSystem_SharedMem.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_TranslationDataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_Util.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_demos.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_fork_exec.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_hdf_dump.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_array.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_attention.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_base.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_cond.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_const.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_container.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_conv.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_encoder_conformer.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_gradient.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_label_smoothing.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_loop.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_math.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_normalization.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_rec.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_reduce.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_rf_signal.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_tensor.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_tools.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_torch_dataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_torch_engine.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_torch_frontend.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tests/test_torch_internal_frontend.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/_setup_returnn_env.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/analyze-dataset-batches.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/bliss-collect-seq-lens.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/bliss-dump-text.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/bliss-get-segment-names.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/bliss-to-ogg-zip.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/bpe-create-lexicon.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/calculate-word-error-rate.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/cleanup-old-models.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/collect-orth-symbols.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/collect-words.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/compile_native_op.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/compile_tf_graph.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/debug-dump-search-scores.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/debug-plot-search-scores.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/dump-dataset-raw-strings.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/dump-dataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/dump-forward-stats.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/dump-forward.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/dump-network-json.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/dump-pickle.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/extract_state_tying_from_dataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/get-attention-weights.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/get-best-model-epoch.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/hdf_dump.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/hdf_dump_translation_dataset.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/import-blocks-mt-model.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/import-t2t-mt-model.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/.gitignore +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/Makefile +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/example/README.md +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/example/libs_list +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.keep_over_epoch.lstm2.config +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/example/rescore_lattice.sh +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/example/state_vars_list +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/example/tensor_names_list +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/file.h +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/htklatticerescorer.cc +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/htklatticerescorer.h +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/main.cc +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/rescorer.h +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/vocabulary.cc +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/lattice_rescorer/vocabulary.h +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/tf_avg_checkpoints.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/tf_inspect_checkpoint.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/tf_inspect_summary_log.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/torch_export_to_onnx.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/torch_inspect_checkpoint.py +0 -0
- {returnn-1.20231220.31131 → returnn-1.20231220.85618}/tools/torch_inspect_checkpoint_and_opt.py +0 -0
{returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn/frontend/attention.py

@@ -374,6 +374,11 @@ class RelPosSelfAttention(SelfAttentionBase):
         output = self.proj(output)
         return output
 
+    # provide this func for compat with some existing code
+    @staticmethod
+    def _rel_shift(x: Tensor, axis: Dim, pos_emb_spatial_dim: Dim, hist_dim: Dim) -> Tensor:
+        return _rel_pos_enc_shift(x, axis, pos_emb_spatial_dim, hist_dim)
+
 
 def _rel_pos_enc_shift(x: Tensor, axis: Dim, pos_emb_spatial_dim: Dim, hist_dim: Dim) -> Tensor:
     """
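For context, a minimal sketch (not from the package; the function name and arguments below are placeholders) of why this is a compatibility shim: code that still calls the static-method API keeps working, because `_rel_shift` simply forwards to the module-level `_rel_pos_enc_shift` helper.

```python
# Minimal sketch: the new static method is a thin wrapper around _rel_pos_enc_shift.
from returnn.tensor import Tensor, Dim
from returnn.frontend.attention import RelPosSelfAttention


def legacy_rel_shift(x: Tensor, axis: Dim, pos_emb_spatial_dim: Dim, hist_dim: Dim) -> Tensor:
    # Equivalent to calling _rel_pos_enc_shift(x, axis, pos_emb_spatial_dim, hist_dim) directly.
    return RelPosSelfAttention._rel_shift(x, axis, pos_emb_spatial_dim, hist_dim)
```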
returnn-1.20231220.85618/returnn/frontend/decoder/transformer.py

@@ -0,0 +1,281 @@
+"""
+(Label-sync) Transformer decoder, including cross attention to encoder
+
+References:
+
+    (Original paper of course)
+    https://pytorch.org/docs/stable/_modules/torch/nn/modules/transformer.html#Transformer
+    https://github.com/pytorch-labs/gpt-fast
+    https://github.com/karpathy/minGPT/blob/master/mingpt/model.py
+    https://github.com/karpathy/nanoGPT/blob/master/model.py
+    https://github.com/facebookresearch/fairseq/blob/main/fairseq/models/transformer/transformer_decoder.py
+"""
+
+from __future__ import annotations
+from typing import Optional, Any, Union, Tuple, Dict, Callable, Sequence
+import functools
+import copy as _copy
+from returnn.util.basic import NotSpecified
+import returnn.frontend as rf
+from returnn.tensor import Tensor, Dim, single_step_dim
+
+
+class TransformerDecoder(rf.Module):
+    """
+    Represents Transformer decoder architecture
+    """
+
+    def __init__(
+        self,
+        vocab_dim: Dim,
+        model_dim: Dim = Dim(512, name="transformer-dec-default-model-dim"),
+        *,
+        num_layers: int,
+        ff_dim: Dim = NotSpecified,
+        ff_activation: Callable[[Tensor], Tensor] = rf.relu,
+        dropout: float = 0.1,
+        num_heads: int = 8,
+        att_dropout: float = 0.1,
+        decoder_layer: Optional[Union[TransformerDecoderLayer, rf.Module, type, Any]] = None,
+        decoder_layer_opts: Optional[Dict[str, Any]] = None,
+        share_embedding: bool = False,
+    ):
+        """
+        :param vocab_dim:
+        :param model_dim: the output feature dimension
+        :param num_layers: the number of encoder layers
+        :param ff_dim: the dimension of feed-forward layers. 2048 originally, or 4 times out_dim
+        :param ff_activation: activation function for feed-forward network
+        :param dropout: the dropout value for the FF block
+        :param num_heads: the number of attention heads
+        :param att_dropout: attention dropout value
+        :param decoder_layer: an instance of :class:`TransformerDecoderLayer` or similar
+        :param decoder_layer_opts: options for the encoder layer
+        :param share_embedding:
+        """
+        super().__init__()
+
+        self.vocab_dim = vocab_dim
+        self.model_dim = model_dim
+
+        # We could make this optional or configurable if we ever need to.
+        # Or maybe you would just have another separate implementation of this module then...
+        self.input_embedding = rf.Embedding(vocab_dim, model_dim)
+
+        # This could also be configurable...
+        self.pos_enc = functools.partial(
+            rf.sinusoidal_positional_encoding, feat_dim=model_dim, dtype=self.input_embedding.weight.dtype
+        )
+
+        if not decoder_layer or isinstance(decoder_layer, type):
+            decoder_layer_opts_ = dict(
+                out_dim=model_dim,
+                ff_dim=ff_dim,
+                ff_activation=ff_activation,
+                dropout=dropout,
+                num_heads=num_heads,
+                att_dropout=att_dropout,
+            )
+            if decoder_layer_opts:
+                decoder_layer_opts_.update(decoder_layer_opts)
+            if not decoder_layer:
+                decoder_layer = TransformerDecoderLayer(**decoder_layer_opts_)
+            elif isinstance(decoder_layer, type):
+                decoder_layer = decoder_layer(**decoder_layer_opts_)
+            else:
+                raise TypeError(f"unexpected decoder_layer {decoder_layer!r}")
+
+        self.layers = rf.Sequential(_copy.deepcopy(decoder_layer) for _ in range(num_layers))
+
+        self.final_layer_norm = rf.LayerNorm(model_dim)
+
+        self.logits = rf.Linear(model_dim, vocab_dim, with_bias=False)
+
+        if share_embedding:
+            self.logits.weight = self.input_embedding.weight
+
+    def default_initial_state(self, *, batch_dims: Sequence[Dim]) -> rf.State:
+        """default initial state"""
+        state = rf.State({k: v.default_initial_state(batch_dims=batch_dims) for k, v in self.layers.items()})
+        state.pos = rf.zeros((), dtype="int32")
+        return state
+
+    def transform_encoder(self, encoder: Tensor, *, axis: Dim) -> rf.State:
+        """
+        Transform encoder output.
+        Note that the Transformer decoder usually expects that layer-norm was applied already on the encoder output.
+        """
+        return rf.State({k: v.transform_encoder(encoder, axis=axis) for k, v in self.layers.items()})
+
+    def __call__(
+        self,
+        source: Tensor,
+        *,
+        spatial_dim: Dim,
+        state: rf.State,
+        encoder: rf.State,
+        collected_outputs: Optional[Dict[str, Tensor]] = None,
+    ) -> Tuple[Tensor, rf.State]:
+        """
+        forward, single step or whole sequence
+
+        :param source:
+        :param spatial_dim: single_step_dim or spatial dim of source
+        :param state: e.g. via :func:`default_initial_state`
+        :param encoder: via :func:`transform_encoder`
+        :param collected_outputs:
+        """
+        new_state = rf.State()
+
+        decoded = self.input_embedding(source)
+        decoded = decoded + self.pos_enc(spatial_dim=spatial_dim, offset=state.pos)
+
+        new_state.pos = state.pos + (1 if spatial_dim == single_step_dim else spatial_dim.get_size_tensor())
+
+        for layer_name, layer in self.layers.items():
+            layer: TransformerDecoderLayer  # or similar
+            decoded, new_state[layer_name] = layer(
+                decoded, spatial_dim=spatial_dim, state=state[layer_name], encoder=encoder[layer_name]
+            )
+            if collected_outputs is not None:
+                collected_outputs[layer_name] = decoded
+
+        decoded = self.final_layer_norm(decoded)
+        logits = self.logits(decoded)
+
+        return logits, new_state
+
+
+class TransformerDecoderLayer(rf.Module):
+    """
+    Represents a conformer block
+    """
+
+    def __init__(
+        self,
+        encoder_dim: Dim,
+        out_dim: Dim = Dim(512, name="transformer-dec-default-out-dim"),
+        *,
+        ff_dim: Dim = NotSpecified,
+        ff_activation: Callable[[Tensor], Tensor] = rf.relu,
+        dropout: float = 0.1,
+        num_heads: int = 8,
+        self_att: Optional[Union[rf.CausalSelfAttention, rf.RelPosCausalSelfAttention, rf.Module, type, Any]] = None,
+        self_att_opts: Optional[Dict[str, Any]] = None,
+        att_dropout: float = 0.1,
+    ):
+        """
+        :param encoder_dim:
+        :param out_dim: the output feature dimension
+        :param ff_dim: the dimension of feed-forward layers. 2048 originally, or 4 times out_dim
+        :param ff_activation: activation function for feed-forward network
+        :param dropout: the dropout value for the FF block
+        :param num_heads: the number of attention heads
+        :param self_att: the self-attention layer. RelPosSelfAttention originally and default
+        :param self_att_opts: options for the self-attention layer, for :class:`nn.RelPosSelfAttention`
+        :param att_dropout: attention dropout value
+        """
+        super().__init__()
+
+        self.encoder_dim = encoder_dim
+        self.dropout = dropout
+        self.dropout_broadcast = rf.dropout_broadcast_default()
+        self.out_dim = out_dim
+
+        if ff_dim is None:
+            ff_dim = 4 * out_dim
+        self.ff = FeedForward(out_dim=out_dim, ff_dim=ff_dim, dropout=dropout, activation=ff_activation)
+        self.ff_layer_norm = rf.LayerNorm(out_dim)
+
+        if self_att is None or isinstance(self_att, type):
+            self_att_opts_ = dict(
+                in_dim=out_dim,
+                proj_dim=out_dim,
+                key_dim_total=out_dim,
+                value_dim_total=out_dim,
+                num_heads=num_heads,
+                att_dropout=att_dropout,
+            )
+            if self_att_opts:
+                self_att_opts_.update(self_att_opts)
+            if self_att is None:
+                self.self_att = rf.CausalSelfAttention(**self_att_opts_)
+            else:
+                self.self_att = self_att(**self_att_opts_)
+        else:
+            self.self_att = self_att
+        self.self_att_layer_norm = rf.LayerNorm(out_dim)
+
+        self.cross_att = rf.CrossAttention(
+            encoder_dim=self.encoder_dim,
+            query_in_dim=out_dim,
+            proj_dim=out_dim,
+            key_dim_total=out_dim,
+            value_dim_total=out_dim,
+            num_heads=num_heads,
+            att_dropout=att_dropout,
+        )
+        self.cross_att_layer_norm = rf.LayerNorm(out_dim)
+
+    def default_initial_state(self, *, batch_dims: Sequence[Dim]) -> rf.State:
+        """default initial state"""
+        return rf.State(self_att=self.self_att.default_initial_state(batch_dims=batch_dims))
+
+    def transform_encoder(self, encoder: Tensor, *, axis: Dim) -> rf.State:
+        """Transform the encoder output."""
+        return rf.State(cross_att=self.cross_att.transform_encoder(encoder, axis=axis))
+
+    def __call__(self, inp: Tensor, *, spatial_dim: Dim, state: rf.State, encoder: rf.State) -> Tuple[Tensor, rf.State]:
+        """forward"""
+        # (multi-head) self-attention (MHSA or simply SA)
+        new_state = rf.State()
+        x_sa_ln = self.self_att_layer_norm(inp)
+        x_sa, new_state.self_att = self.self_att(x_sa_ln, axis=spatial_dim, state=state.self_att)
+        x_sa = rf.dropout(x_sa, self.dropout, axis=self.dropout_broadcast and self.out_dim)
+        x_sa_out = x_sa + inp
+
+        # (multi-head) cross-attention (CA)
+        x_ca_ln = self.cross_att_layer_norm(x_sa_out)
+        x_ca = self.cross_att(x_ca_ln, encoder)
+        x_ca = rf.dropout(x_ca, self.dropout, axis=self.dropout_broadcast and self.out_dim)
+        x_ca_out = x_ca + x_sa_out
+
+        # feed-forward (FF)
+        x_ff_ln = self.ff_layer_norm(x_ca_out)
+        x_ff = self.ff(x_ff_ln)
+        x_ff = rf.dropout(x_ff, self.dropout, axis=self.dropout_broadcast and self.out_dim)
+        x_ff_out = x_ff + x_ca_out
+
+        return x_ff_out, new_state
+
+
+class FeedForward(rf.Module):
+    """
+    Conformer position-wise feedforward neural network layer
+        FF -> Activation -> Dropout -> FF
+    """
+
+    def __init__(self, out_dim: Dim, *, ff_dim: Dim, dropout: float, activation: Callable[[Tensor], Tensor]):
+        """
+        :param out_dim: output feature dimension
+        :param ff_dim: dimension of the feed-forward layers
+        :param dropout: dropout value
+        :param activation: activation function
+        """
+        super().__init__()
+
+        self.out_dim = out_dim
+        self.dropout = dropout
+        self.dropout_broadcast = rf.dropout_broadcast_default()
+        self.activation = activation
+
+        self.linear_ff = rf.Linear(out_dim, ff_dim)
+        self.linear_out = rf.Linear(ff_dim, out_dim)
+
+    def __call__(self, inp: Tensor) -> Tensor:
+        """forward"""
+        x_ff1 = self.linear_ff(inp)
+        x_act = self.activation(x_ff1)
+        x_drop = rf.dropout(x_act, self.dropout, axis=self.dropout_broadcast and self.linear_ff.out_dim)
+        x_ff2 = self.linear_out(x_drop)
+        return x_ff2
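For orientation, a hedged usage sketch of the new module, based only on the signatures shown above; the dim sizes, names, and the encoder output referenced in the comments are assumptions for illustration, not part of the package:

```python
# Sketch only, assuming an installed returnn with this release; dim sizes/names are made up.
from returnn.tensor import Dim, batch_dim, single_step_dim
from returnn.frontend.decoder.transformer import TransformerDecoder

vocab_dim = Dim(1000, name="vocab")  # assumed target vocabulary size
enc_dim = Dim(512, name="enc")       # assumed encoder feature dim

decoder = TransformerDecoder(
    vocab_dim=vocab_dim,
    num_layers=6,
    ff_dim=Dim(2048, name="ff"),  # passed explicitly here
    # The default TransformerDecoderLayer needs encoder_dim for its cross-attention,
    # so it is forwarded via decoder_layer_opts.
    decoder_layer_opts=dict(encoder_dim=enc_dim),
)

# Given some encoder output `enc_out` (Tensor) with time axis `enc_spatial_dim` (Dim):
# enc_state = decoder.transform_encoder(enc_out, axis=enc_spatial_dim)
# state = decoder.default_initial_state(batch_dims=[batch_dim])
# One label-sync decode step with the previous label `prev_label`:
# logits, state = decoder(prev_label, spatial_dim=single_step_dim, state=state, encoder=enc_state)
```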
{returnn-1.20231220.31131 → returnn-1.20231220.85618}/returnn.egg-info/SOURCES.txt

@@ -201,6 +201,8 @@ returnn/frontend/_native/tensor_ops.hpp
 returnn/frontend/audio/__init__.py
 returnn/frontend/audio/mel.py
 returnn/frontend/audio/specaugment.py
+returnn/frontend/decoder/__init__.py
+returnn/frontend/decoder/transformer.py
 returnn/frontend/encoder/__init__.py
 returnn/frontend/encoder/base.py
 returnn/frontend/encoder/conformer.py
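With the SOURCES.txt update above, the new decoder package is included in the distribution, so (assuming this release is installed) the classes defined in the new file are importable directly:

```python
# Names as defined in the new returnn/frontend/decoder/transformer.py shown above.
from returnn.frontend.decoder.transformer import TransformerDecoder, TransformerDecoderLayer, FeedForward
```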