spikezoo 0.2.3.5__tar.gz → 0.2.3.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (212)
  1. spikezoo-0.2.3.7/PKG-INFO +151 -0
  2. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/models/bsf/__pycache__/align.cpython-39.pyc +0 -0
  3. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/models/bsf/__pycache__/bsf.cpython-39.pyc +0 -0
  4. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/models/bsf/__pycache__/rep.cpython-39.pyc +0 -0
  5. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spikeclip/__pycache__/nets.cpython-39.pyc +0 -0
  6. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/models/__pycache__/layers.cpython-39.pyc +0 -0
  7. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/models/__pycache__/networks.cpython-39.pyc +0 -0
  8. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssml/__pycache__/cbam.cpython-39.pyc +0 -0
  9. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssml/__pycache__/model.cpython-39.pyc +0 -0
  10. spikezoo-0.2.3.7/spikezoo/archs/stir/metrics/__pycache__/losses.cpython-39.pyc +0 -0
  11. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/models/__pycache__/Vgg19.cpython-39.pyc +0 -0
  12. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/models/__pycache__/networks_STIR.cpython-39.pyc +0 -0
  13. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/models/__pycache__/submodules.cpython-39.pyc +0 -0
  14. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/models/__pycache__/transformer_new.cpython-39.pyc +0 -0
  15. spikezoo-0.2.3.7/spikezoo/archs/stir/package_core/package_core/__pycache__/__init__.cpython-39.pyc +0 -0
  16. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/__pycache__/geometry.cpython-39.pyc +0 -0
  17. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/__pycache__/image_proc.cpython-39.pyc +0 -0
  18. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/__pycache__/losses.cpython-39.pyc +0 -0
  19. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/__pycache__/net_basics.cpython-39.pyc +0 -0
  20. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/tfi/__pycache__/nets.cpython-39.pyc +0 -0
  21. spikezoo-0.2.3.7/spikezoo/archs/tfp/__pycache__/nets.cpython-39.pyc +0 -0
  22. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/__pycache__/dwtnets.cpython-39.pyc +0 -0
  23. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/__pycache__/submodules.cpython-39.pyc +0 -0
  24. spikezoo-0.2.3.7/spikezoo/archs/yourmodel/arch/__pycache__/net.cpython-39.pyc +0 -0
  25. spikezoo-0.2.3.7/spikezoo/archs/yourmodel/arch/net.py +35 -0
  26. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/datasets/__init__.py +20 -21
  27. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/datasets/base_dataset.py +25 -19
  28. spikezoo-0.2.3.5/spikezoo/datasets/realworld_dataset.py → spikezoo-0.2.3.7/spikezoo/datasets/realdata_dataset.py +5 -7
  29. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/datasets/reds_base_dataset.py +1 -1
  30. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/datasets/szdata_dataset.py +1 -1
  31. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/datasets/uhsr_dataset.py +1 -1
  32. spikezoo-0.2.3.7/spikezoo/datasets/yourdataset_dataset.py +23 -0
  33. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/__init__.py +11 -18
  34. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/base_model.py +10 -4
  35. spikezoo-0.2.3.7/spikezoo/models/yourmodel_model.py +22 -0
  36. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/pipeline/base_pipeline.py +17 -10
  37. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/pipeline/ensemble_pipeline.py +2 -1
  38. spikezoo-0.2.3.7/spikezoo/pipeline/train_cfgs.py +70 -0
  39. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/pipeline/train_pipeline.py +14 -14
  40. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/utils/spike_utils.py +1 -1
  41. spikezoo-0.2.3.7/spikezoo.egg-info/PKG-INFO +151 -0
  42. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo.egg-info/SOURCES.txt +5 -2
  43. spikezoo-0.2.3.5/PKG-INFO +0 -258
  44. spikezoo-0.2.3.5/spikezoo/archs/stir/metrics/__pycache__/losses.cpython-39.pyc +0 -0
  45. spikezoo-0.2.3.5/spikezoo/archs/stir/package_core/package_core/__pycache__/__init__.cpython-39.pyc +0 -0
  46. spikezoo-0.2.3.5/spikezoo/archs/tfp/__pycache__/nets.cpython-39.pyc +0 -0
  47. spikezoo-0.2.3.5/spikezoo/data/base/train/spike/203_part4_key_id151.dat +0 -0
  48. spikezoo-0.2.3.5/spikezoo/pipeline/train_cfgs.py +0 -67
  49. spikezoo-0.2.3.5/spikezoo.egg-info/PKG-INFO +0 -258
  50. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/LICENSE.txt +0 -0
  51. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/setup.cfg +0 -0
  52. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/__init__.py +0 -0
  53. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/__init__.py +0 -0
  54. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/__pycache__/__init__.cpython-39.pyc +0 -0
  55. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/base/__pycache__/nets.cpython-39.pyc +0 -0
  56. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/base/nets.py +0 -0
  57. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/README.md +0 -0
  58. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/datasets/datasets.py +0 -0
  59. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/datasets/ds_utils.py +0 -0
  60. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/main.py +0 -0
  61. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/metrics/psnr.py +0 -0
  62. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/metrics/ssim.py +0 -0
  63. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/models/bsf/align.py +0 -0
  64. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/models/bsf/bsf.py +0 -0
  65. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/models/bsf/dsft_convert.py +0 -0
  66. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/models/bsf/rep.py +0 -0
  67. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/models/get_model.py +0 -0
  68. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/prepare_data/DSFT.py +0 -0
  69. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/prepare_data/crop_dataset_train.py +0 -0
  70. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/prepare_data/crop_dataset_val.py +0 -0
  71. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/prepare_data/crop_train.sh +0 -0
  72. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/prepare_data/crop_val.sh +0 -0
  73. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/prepare_data/io_utils.py +0 -0
  74. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/requirements.txt +0 -0
  75. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/test.py +0 -0
  76. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/bsf/utils.py +0 -0
  77. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spikeclip/nets.py +0 -0
  78. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/.github/workflows/pylint.yml +0 -0
  79. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/.gitignore +0 -0
  80. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/DCNv2.py +0 -0
  81. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/__pycache__/DCNv2.cpython-39.pyc +0 -0
  82. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/__pycache__/align_arch.cpython-39.pyc +0 -0
  83. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/__pycache__/nets.cpython-39.pyc +0 -0
  84. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/align_arch.py +0 -0
  85. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/dataset.py +0 -0
  86. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/nets.py +0 -0
  87. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/readme.md +0 -0
  88. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/test_gen_imgseq.py +0 -0
  89. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/train.py +0 -0
  90. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/spk2imgnet/utils.py +0 -0
  91. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/README.md +0 -0
  92. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/configs/SSIR.yml +0 -0
  93. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/configs/yml_parser.py +0 -0
  94. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/datasets/dataset_sreds.py +0 -0
  95. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/datasets/ds_utils.py +0 -0
  96. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/losses.py +0 -0
  97. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/main.py +0 -0
  98. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/metrics/psnr.py +0 -0
  99. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/metrics/ssim.py +0 -0
  100. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/models/Vgg19.py +0 -0
  101. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/models/layers.py +0 -0
  102. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/models/networks.py +0 -0
  103. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/requirements.txt +0 -0
  104. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/shells/eval_SREDS.sh +0 -0
  105. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/shells/train_SSIR.sh +0 -0
  106. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/test.py +0 -0
  107. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssir/utils.py +0 -0
  108. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssml/cbam.py +0 -0
  109. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssml/model.py +0 -0
  110. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssml/res.png +0 -0
  111. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/ssml/test.py +0 -0
  112. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/.git-credentials +0 -0
  113. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/README.md +0 -0
  114. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/ckpt_outputs/Descriptions.txt +0 -0
  115. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/configs/STIR.yml +0 -0
  116. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/configs/utils.py +0 -0
  117. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/configs/yml_parser.py +0 -0
  118. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/datasets/dataset_sreds.py +0 -0
  119. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/datasets/ds_utils.py +0 -0
  120. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/eval_SREDS.sh +0 -0
  121. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/main.py +0 -0
  122. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/metrics/losses.py +0 -0
  123. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/metrics/psnr.py +0 -0
  124. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/metrics/ssim.py +0 -0
  125. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/models/Vgg19.py +0 -0
  126. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/models/networks_STIR.py +0 -0
  127. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/models/submodules.py +0 -0
  128. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/models/transformer_new.py +0 -0
  129. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/__init__.py +0 -0
  130. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/convertions.py +0 -0
  131. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/disp_netS.py +0 -0
  132. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/flow_utils.py +0 -0
  133. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/generic_train_test.py +0 -0
  134. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/geometry.py +0 -0
  135. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/image_proc.py +0 -0
  136. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/linalg.py +0 -0
  137. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/losses.py +0 -0
  138. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/metrics.py +0 -0
  139. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/model_base.py +0 -0
  140. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/net_basics.py +0 -0
  141. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/resnet.py +0 -0
  142. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/transforms.py +0 -0
  143. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/build/lib/package_core/utils.py +0 -0
  144. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/dist/package_core-0.0.0-py3.9.egg +0 -0
  145. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/__init__.py +0 -0
  146. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/convertions.py +0 -0
  147. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/disp_netS.py +0 -0
  148. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/flow_utils.py +0 -0
  149. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/generic_train_test.py +0 -0
  150. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/geometry.py +0 -0
  151. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/image_proc.py +0 -0
  152. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/linalg.py +0 -0
  153. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/losses.py +0 -0
  154. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/metrics.py +0 -0
  155. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/model_base.py +0 -0
  156. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/net_basics.py +0 -0
  157. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/resnet.py +0 -0
  158. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/transforms.py +0 -0
  159. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core/utils.py +0 -0
  160. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core.egg-info/PKG-INFO +0 -0
  161. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core.egg-info/SOURCES.txt +0 -0
  162. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core.egg-info/dependency_links.txt +0 -0
  163. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/package_core.egg-info/top_level.txt +0 -0
  164. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/package_core/setup.py +0 -0
  165. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/requirements.txt +0 -0
  166. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/stir/train_STIR.sh +0 -0
  167. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/tfi/nets.py +0 -0
  168. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/tfp/nets.py +0 -0
  169. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/README.md +0 -0
  170. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/dataset.py +0 -0
  171. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/demo.png +0 -0
  172. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/demo.py +0 -0
  173. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/dwtnets.py +0 -0
  174. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/eval.py +0 -0
  175. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/logs/WGSE-Dwt1dNet-db8-5-ks3/log.txt +0 -0
  176. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/submodules.py +0 -0
  177. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/train.py +0 -0
  178. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/transform.py +0 -0
  179. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/utils.py +0 -0
  180. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/archs/wgse/weights/demo.png +0 -0
  181. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/test/gt/200_part1_key_id151.png +0 -0
  182. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/test/gt/200_part3_key_id151.png +0 -0
  183. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/test/gt/203_part1_key_id151.png +0 -0
  184. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/test/spike/200_part1_key_id151.dat +0 -0
  185. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/test/spike/200_part3_key_id151.dat +0 -0
  186. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/test/spike/203_part1_key_id151.dat +0 -0
  187. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/train/gt/203_part2_key_id151.png +0 -0
  188. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/train/gt/203_part3_key_id151.png +0 -0
  189. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/train/gt/203_part4_key_id151.png +0 -0
  190. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/train/spike/203_part2_key_id151.dat +0 -0
  191. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/data/base/train/spike/203_part3_key_id151.dat +0 -0
  192. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/metrics/__init__.py +0 -0
  193. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/bsf_model.py +0 -0
  194. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/spikeclip_model.py +0 -0
  195. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/spk2imgnet_model.py +0 -0
  196. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/ssir_model.py +0 -0
  197. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/ssml_model.py +0 -0
  198. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/stir_model.py +0 -0
  199. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/tfi_model.py +0 -0
  200. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/tfp_model.py +0 -0
  201. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/models/wgse_model.py +0 -0
  202. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/pipeline/__init__.py +0 -0
  203. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/utils/__init__.py +0 -0
  204. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/utils/data_utils.py +0 -0
  205. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/utils/img_utils.py +0 -0
  206. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/utils/optimizer_utils.py +0 -0
  207. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/utils/other_utils.py +0 -0
  208. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/utils/scheduler_utils.py +0 -0
  209. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo/utils/vidar_loader.cpython-39-x86_64-linux-gnu.so +0 -0
  210. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo.egg-info/dependency_links.txt +0 -0
  211. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo.egg-info/requires.txt +0 -0
  212. {spikezoo-0.2.3.5 → spikezoo-0.2.3.7}/spikezoo.egg-info/top_level.txt +0 -0
@@ -0,0 +1,151 @@
+ Metadata-Version: 2.2
+ Name: spikezoo
+ Version: 0.2.3.7
+ Summary: A deep learning toolbox for spike-to-image models.
+ Home-page: https://github.com/chenkang455/Spike-Zoo
+ Author: Kang Chen
+ Author-email: mrchenkang@stu.pku.edu.cn
+ Requires-Python: >=3.7
+ Description-Content-Type: text/markdown
+ License-File: LICENSE.txt
+ Requires-Dist: torch
+ Requires-Dist: requests
+ Requires-Dist: numpy
+ Requires-Dist: tqdm
+ Requires-Dist: scikit-image
+ Requires-Dist: lpips
+ Requires-Dist: pyiqa
+ Requires-Dist: opencv-python
+ Requires-Dist: thop
+ Requires-Dist: pytorch-wavelets
+ Requires-Dist: pytz
+ Requires-Dist: PyWavelets
+ Requires-Dist: pandas
+ Requires-Dist: pillow
+ Requires-Dist: scikit-learn
+ Requires-Dist: scipy
+ Requires-Dist: spikingjelly
+ Requires-Dist: setuptools
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
+ <p align="center">
+ <img src="imgs/spike-zoo.png" width="300"/>
+ </p>
+
+ <h5 align="center">
+
+ [![GitHub repo stars](https://img.shields.io/github/stars/chenkang455/Spike-Zoo?style=flat&logo=github&logoColor=whitesmoke&label=Stars)](https://github.com/chenkang455/Spike-Zoo/stargazers) [![GitHub Issues](https://img.shields.io/github/issues/chenkang455/Spike-Zoo?style=flat&logo=github&logoColor=whitesmoke&label=Issues)](https://github.com/chenkang455/Spike-Zoo/issues) <a href="https://badge.fury.io/py/spikezoo"><img src="https://badge.fury.io/py/spikezoo.svg" alt="PyPI version"></a> <a href='https://spike-zoo.readthedocs.io/zh-cn/latest/index.html'><img src='https://readthedocs.com/projects/plenoptix-nerfstudio/badge/?version=latest' alt='Documentation Status' /></a> [![License](https://img.shields.io/badge/License-MIT-yellow)](https://github.com/chenkang455/Spike-Zoo)
+ </h5>
+
+
+ <!-- <h2 align="center">
+ <a href="">⚡Spike-Zoo:
+ </a>
+ </h2> -->
+
+ ## 📖 About
+ ⚡Spike-Zoo is the go-to library for state-of-the-art pretrained **spike-to-image** models designed to reconstruct images from spike streams. Whether you need a simple inference solution or aim to train your own spike-to-image model, ⚡Spike-Zoo is a modular toolbox that supports both, with key features including:
+
+ - Fast inference with pre-trained models.
+ - Training support for custom-designed spike-to-image models.
+ - Specialized functions for processing spike data.
+
+ > We look forward to your feedback on this project. We welcome issues and code contributions and aim to respond within one day.
+
+ ## 🚩 Updates/Changelog
+ * **25-02-02:** Released the `Spike-Zoo v0.2` code, which supports more methods and more usages, such as training your own method from scratch.
+ * **24-07-19:** Released the `Spike-Zoo v0.1` code for basic evaluation of SOTA methods.
+
+ ## 🍾 Quick Start
+ ### 1. Installation
+ For users focused on **utilizing pretrained models for spike-to-image conversion**, we recommend installing Spike-Zoo using one of the following methods:
+
+ * Install the latest stable version `0.2.3.5` from PyPI:
+ ```
+ pip install spikezoo
+ ```
+ * Install the latest development version `0.2.3.6` from the source code **(recommended)**:
+ ```
+ git clone https://github.com/chenkang455/Spike-Zoo
+ cd Spike-Zoo
+ python setup.py install
+ ```
+
+ For users interested in **training their own spike-to-image model based on our framework**, we recommend cloning the repository and modifying the related code directly.
+ ```
+ git clone https://github.com/chenkang455/Spike-Zoo
+ cd Spike-Zoo
+ python setup.py develop
+ ```
+
+ ### 2. Inference
+ Reconstructing images from a spike stream is straightforward with Spike-Zoo. Try the following single-model example:
+ ``` python
+ from spikezoo.pipeline import Pipeline, PipelineConfig
+ import spikezoo as sz
+ pipeline = Pipeline(
+     cfg=PipelineConfig(save_folder="results",version="v023"),
+     model_cfg=sz.METHOD.BASE,
+     dataset_cfg=sz.DATASET.BASE
+ )
+ pipeline.infer_from_dataset(idx = 0)
+ ```
+
+
+ ### 3. Training
+ We provide user-friendly code for training our `BASE` model (modified from `SpikeCLIP`) on the classic `REDS` dataset introduced in `Spk2ImgNet`:
+ ``` python
+ from spikezoo.pipeline import TrainPipelineConfig, TrainPipeline
+ from spikezoo.datasets.reds_base_dataset import REDS_BASEConfig
+ from spikezoo.models.base_model import BaseModelConfig
+ pipeline = TrainPipeline(
+     cfg=TrainPipelineConfig(save_folder="results", epochs = 10),
+     dataset_cfg=REDS_BASEConfig(root_dir = "spikezoo/data/reds_base"),
+     model_cfg=BaseModelConfig(),
+ )
+ pipeline.train()
+ ```
+ Training finishes in about `2 minutes` on a single RTX 4090 GPU, achieving `32.8 dB` PSNR and `0.92` SSIM.
+
+ > 🌟 We encourage users to develop their own models with simple modifications to our framework.
+
+ ## 📚 How to navigate the documentation
+
+ | **Link** | **Description** |
+ | --- | --- |
+ | [Quick Start](https://spike-zoo.readthedocs.io/zh-cn/latest/%E5%BF%AB%E9%80%9F%E5%BC%80%E5%A7%8B.html) | Learn how to quickly get started with the Spike-Zoo repository for inference and training. |
+ | [Dataset](https://spike-zoo.readthedocs.io/zh-cn/latest/%E6%95%B0%E6%8D%AE%E9%9B%86.html) | Learn the parameter configuration of datasets and how to construct them. |
+ | [Model](https://spike-zoo.readthedocs.io/zh-cn/latest/%E6%A8%A1%E5%9E%8B.html) | Learn the parameter configuration of models and how to construct them. |
+ | [Pipeline](https://spike-zoo.readthedocs.io/zh-cn/latest/%E5%A4%84%E7%90%86%E7%AE%A1%E7%BA%BF.html) | Learn how to configure and construct the processing pipeline for models. |
+ | [Released Version](https://spike-zoo.readthedocs.io/zh-cn/latest/%E5%8F%91%E8%A1%8C%E7%89%88%E6%9C%AC%E4%BB%8B%E7%BB%8D.html) | Explains the differences between released versions of the pre-trained weights. |
+ | [Examples](https://spike-zoo.readthedocs.io/zh-cn/latest/%E4%BD%BF%E7%94%A8%E4%BE%8B%E5%AD%90.html) | Complete code examples for using Spike-Zoo. |
+ | [Supports](https://spike-zoo.readthedocs.io/zh-cn/latest/%E6%94%AF%E6%8C%81%E8%8C%83%E5%9B%B4.html) | Learn about the datasets and models supported by Spike-Zoo. |
+
+
+ ## 📅 TODO
+ - [x] Support the overall pipeline for spike simulation.
+ - [x] Provide the tutorials.
+ - [ ] Support more training settings.
+ - [ ] Support more spike-based image reconstruction methods and datasets.
+
+ ## ✨‍ Acknowledgment
+ Our code is built on the open-source projects [SpikeCV](https://spikecv.github.io/), [IQA-Pytorch](https://github.com/chaofengc/IQA-PyTorch), [BasicSR](https://github.com/XPixelGroup/BasicSR) and [NeRFStudio](https://github.com/nerfstudio-project/nerfstudio). We appreciate the effort of the contributors to these repositories. Thanks to [@zhiwen_huang](https://github.com/hzw-abc), [@ruizhao26](https://github.com/ruizhao26), [@shiyan_chen](https://github.com/hnmizuho) and [@Leozhangjiyuan](https://github.com/Leozhangjiyuan) for their help in building this project.
+
+ ## 📑 Citation
+ If you find our code helpful to your research, please consider using the following citation:
+ ```
+ @misc{spikezoo,
+     title={{Spike-Zoo}: A Toolbox for Spike-to-Image Reconstruction},
+     author={Kang Chen and Zhiyuan Ye and Tiejun Huang and Zhaofei Yu},
+     year={2025},
+     howpublished = {\url{https://github.com/chenkang455/Spike-Zoo}},
+ }
+ ```
@@ -0,0 +1,35 @@
+ import torch.nn as nn
+
+ def conv_layer(inDim, outDim, ks, s, p, norm_layer="none"):
+     ## convolutional layer
+     conv = nn.Conv2d(inDim, outDim, kernel_size=ks, stride=s, padding=p)
+     relu = nn.ReLU(True)
+     assert norm_layer in ("batch", "instance", "none")
+     if norm_layer == "none":
+         seq = nn.Sequential(*[conv, relu])
+     else:
+         if norm_layer == "instance":
+             norm = nn.InstanceNorm2d(outDim, affine=False, track_running_stats=False)  # instance norm
+         else:
+             momentum = 0.1
+             norm = nn.BatchNorm2d(outDim, momentum=momentum, affine=True, track_running_stats=True)
+         seq = nn.Sequential(*[conv, norm, relu])
+     return seq
+
+
+ class YourNet(nn.Module):
+     """Borrow the structure from the SpikeCLIP. (https://arxiv.org/abs/2501.04477)"""
+
+     def __init__(self, inDim=41):
+         super(YourNet, self).__init__()
+         norm = "none"
+         outDim = 1
+         convBlock1 = conv_layer(inDim, 64, 3, 1, 1)
+         convBlock2 = conv_layer(64, 128, 3, 1, 1, norm)
+         convBlock3 = conv_layer(128, 64, 3, 1, 1, norm)
+         convBlock4 = conv_layer(64, 16, 3, 1, 1, norm)
+         conv = nn.Conv2d(16, outDim, 3, 1, 1)
+         self.seq = nn.Sequential(*[convBlock1, convBlock2, convBlock3, convBlock4, conv])
+
+     def forward(self, x):
+         return self.seq(x)
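Below is a minimal usage sketch for the `YourNet` template above (not part of the package itself); the `(batch, 41, height, width)` input shape is an assumption based on `inDim=41` and the stride-1, padding-1 convolutions, which keep the spatial size and return a single-channel image.

```python
import torch
from spikezoo.archs.yourmodel.arch.net import YourNet  # module path as laid out in this release

# Dummy binary spike stream: 41 frames at the 250x400 resolution used elsewhere in the package.
spike = torch.randint(0, 2, (1, 41, 250, 400)).float()
net = YourNet(inDim=41)
with torch.no_grad():
    img = net(spike)
print(img.shape)  # expected: torch.Size([1, 1, 250, 400])
```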
@@ -1,4 +1,5 @@
  from spikezoo.datasets.base_dataset import BaseDataset, BaseDatasetConfig
+
  from dataclasses import replace
  import importlib, inspect
  import os
@@ -12,23 +13,24 @@ dataset_list = [file.replace("_dataset.py", "") for file in files_list if file.e
 
 
  # todo register function
- def build_dataset_cfg(cfg: BaseDatasetConfig, split: Literal["train", "test"] = "test"):
+ def build_dataset_cfg(cfg: BaseDatasetConfig):
      """Build the dataset from the given dataset config."""
-     # build new cfg according to split
-     cfg = replace(cfg, split=split)
      # dataset module
-     module_name = cfg.dataset_name + "_dataset"
-     assert cfg.dataset_name in dataset_list, f"Given dataset {cfg.dataset_name} not in our dataset list {dataset_list}."
-     module_name = "spikezoo.datasets." + module_name
-     module = importlib.import_module(module_name)
-     # dataset,dataset_config
-     dataset_name = cfg.dataset_name
-     dataset_name = dataset_name + "Dataset" if dataset_name == "base" else dataset_name
-     dataset_cls: BaseDataset = getattr_case_insensitive(module, dataset_name)
+     if cfg.dataset_cls_local == None:
+         module_name = cfg.dataset_name + "_dataset"
+         assert cfg.dataset_name in dataset_list, f"Given dataset {cfg.dataset_name} not in our dataset list {dataset_list}."
+         module_name = "spikezoo.datasets." + module_name
+         module = importlib.import_module(module_name)
+         # dataset,dataset_config
+         dataset_name = cfg.dataset_name
+         dataset_name = dataset_name + "Dataset" if dataset_name == "base" else dataset_name
+         dataset_cls: BaseDataset = getattr_case_insensitive(module, dataset_name)
+     else:
+         dataset_cls = cfg.dataset_cls_local
      dataset = dataset_cls(cfg)
      return dataset
 
- def build_dataset_name(dataset_name: str, split: Literal["train", "test"] = "test"):
+ def build_dataset_name(dataset_name: str):
      """Build the default dataset from the given name."""
      module_name = dataset_name + "_dataset"
      assert dataset_name in dataset_list, f"Given dataset {dataset_name} not in our dataset list {dataset_list}."
@@ -37,22 +39,19 @@ def build_dataset_name(dataset_name: str, split: Literal["train", "test"] = "tes
      # dataset,dataset_config
      dataset_name = dataset_name + "Dataset" if dataset_name == "base" else dataset_name
      dataset_cls: BaseDataset = getattr_case_insensitive(module, dataset_name)
-     dataset_cfg: BaseDatasetConfig = getattr_case_insensitive(module, dataset_name + "config")(split=split)
+     dataset_cfg: BaseDatasetConfig = getattr_case_insensitive(module, dataset_name + "config")()
      dataset = dataset_cls(dataset_cfg)
      return dataset
 
 
  # todo to modify according to the basicsr
- def build_dataloader(dataset: BaseDataset, cfg=None):
+ def build_dataloader(dataset, cfg):
      # train dataloader
-     if dataset.cfg.split == "train":
-         if cfg is None:
-             return torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=False, num_workers=1)
-         else:
-             return torch.utils.data.DataLoader(dataset, batch_size=cfg.bs_train, shuffle=True, num_workers=cfg.num_workers, pin_memory=cfg.pin_memory)
+     if dataset.split == "train" and cfg._mode == "train_mode":
+         return torch.utils.data.DataLoader(dataset, batch_size=cfg.bs_train, shuffle=True, num_workers=cfg.nw_train, pin_memory=cfg.pin_memory)
      # test dataloader
-     elif dataset.cfg.split == "test":
-         return torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=False, num_workers=1)
+     else:
+         return torch.utils.data.DataLoader(dataset, batch_size=cfg.bs_test, shuffle=False, num_workers=cfg.nw_test,pin_memory=False)
 
 
  # dataset_size_dict = {}
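For illustration, a sketch of how the reworked loader helpers might be driven; `LoaderCfg` is a hypothetical stand-in that only carries the attributes the new `build_dataloader` reads (`bs_train`, `nw_train`, `pin_memory`, `_mode`, `bs_test`, `nw_test`), not a real spikezoo config class.

```python
from dataclasses import dataclass
from spikezoo.datasets import build_dataset_cfg, build_dataloader
from spikezoo.datasets.reds_base_dataset import REDS_BASEConfig

@dataclass
class LoaderCfg:  # hypothetical stand-in for the pipeline config
    bs_train: int = 8
    nw_train: int = 4
    bs_test: int = 1
    nw_test: int = 1
    pin_memory: bool = False
    _mode: str = "train_mode"

dataset = build_dataset_cfg(REDS_BASEConfig(root_dir="spikezoo/data/reds_base"))
dataset.build_source(split="train")              # dataset.split drives the branch below
loader = build_dataloader(dataset, LoaderCfg())  # train branch: shuffled, batch_size=bs_train
```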
@@ -11,6 +11,7 @@ import warnings
  import torch
  from tqdm import tqdm
  from spikezoo.utils.data_utils import Augmentor
+ from typing import Optional
 
 
  @dataclass
@@ -36,24 +37,18 @@ class BaseDatasetConfig:
      img_dir_name: str = "gt"
      "Rate. (-1 denotes variant)"
      rate: float = 0.6
-
+
      # ------------- Config -------------
-     "Dataset split: train/test. Default set as the 'test' for evaluation."
-     split: Literal["train", "test"] = "test"
      "Use the data augumentation technique or not."
      use_aug: bool = False
      "Use cache mechanism."
      use_cache: bool = False
      "Crop size."
      crop_size: tuple = (-1, -1)
-
-
-     # post process
-     def __post_init__(self):
-         self.spike_length = self.spike_length_train if self.split == "train" else self.spike_length_test
-         self.root_dir = Path(self.root_dir) if isinstance(self.root_dir, str) else self.root_dir
-         # todo try download
-         assert self.root_dir.exists(), f"No files found in {self.root_dir} for the specified dataset `{self.dataset_name}`."
+     "Load the dataset from local or spikezoo lib."
+     dataset_cls_local: Optional[Dataset] = None
+     "Spike load version. [python,cpp]"
+     spike_load_version: Literal["python", "cpp"] = "python"
 
 
  # todo cache mechanism
@@ -61,10 +56,6 @@ class BaseDataset(Dataset):
      def __init__(self, cfg: BaseDatasetConfig):
          super(BaseDataset, self).__init__()
          self.cfg = cfg
-         self.augmentor = Augmentor(self.cfg.crop_size) if self.cfg.use_aug == True and self.cfg.split == "train" else -1
-         self.prepare_data()
-         self.cache_data() if cfg.use_cache == True else -1
-         warnings.warn("Lengths of the image list and the spike list should be equal.") if len(self.img_list) != len(self.spike_list) else -1
 
      def __len__(self):
          return len(self.spike_list)
@@ -79,7 +70,7 @@ class BaseDataset(Dataset):
          img = self.get_img(idx)
 
          # process data
-         if self.cfg.use_aug == True and self.cfg.split == "train":
+         if self.cfg.use_aug == True and self.split == "train":
              spike, img = self.augmentor(spike, img)
 
          # rate
@@ -89,15 +80,29 @@ class BaseDataset(Dataset):
          batch = {"spike": spike, "gt_img": img, "rate": rate}
          return batch
 
+     def build_source(self, split: Literal["train", "test"] = "test"):
+         """Build the dataset source and prepare to be loaded files."""
+         # spike length
+         self.split = split
+         self.spike_length = self.cfg.spike_length_train if self.split == "train" else self.cfg.spike_length_test
+         # root dir
+         self.cfg.root_dir = Path(self.cfg.root_dir) if isinstance(self.cfg.root_dir, str) else self.cfg.root_dir
+         assert self.cfg.root_dir.exists(), f"No files found in {self.cfg.root_dir} for the specified dataset `{self.cfg.dataset_name}`."
+         # prepare
+         self.augmentor = Augmentor(self.cfg.crop_size) if self.cfg.use_aug == True and self.split == "train" else -1
+         self.prepare_data()
+         self.cache_data() if self.cfg.use_cache == True else -1
+         warnings.warn("Lengths of the image list and the spike list should be equal.") if len(self.img_list) != len(self.spike_list) else -1
+
      # todo: To be overridden
      def prepare_data(self):
          """Specify the spike and image files to be loaded."""
          # spike
-         self.spike_dir = self.cfg.root_dir / self.cfg.split / self.cfg.spike_dir_name
+         self.spike_dir = self.cfg.root_dir / self.split / self.cfg.spike_dir_name
          self.spike_list = self.get_spike_files(self.spike_dir)
          # gt
          if self.cfg.with_img == True:
-             self.img_dir = self.cfg.root_dir / self.cfg.split / self.cfg.img_dir_name
+             self.img_dir = self.cfg.root_dir / self.split / self.cfg.img_dir_name
              self.img_list = self.get_image_files(self.img_dir)
 
      # todo: To be overridden
@@ -115,12 +120,13 @@ class BaseDataset(Dataset):
              height=self.cfg.height,
              width=self.cfg.width,
              out_format="tensor",
+             version=self.cfg.spike_load_version
          )
          return spike
 
      def get_spike(self, idx):
          """Get and process the spike stream from the given idx."""
-         spike_length = self.cfg.spike_length
+         spike_length = self.spike_length
          spike = self.load_spike(idx)
          assert spike.shape[0] >= spike_length, f"Given spike length {spike.shape[0]} smaller than the required length {spike_length}"
          spike_mid = spike.shape[0] // 2
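Since `split` is no longer a config field (the removed `__post_init__` handled it before), the same config can now back both splits, and `build_source` resolves the spike length and sub-folders per split. A minimal sketch, assuming the REDS data sits at the path used in the README example above:

```python
from spikezoo.datasets import build_dataset_cfg
from spikezoo.datasets.reds_base_dataset import REDS_BASEConfig

cfg = REDS_BASEConfig(root_dir="spikezoo/data/reds_base")
train_set = build_dataset_cfg(cfg)
train_set.build_source(split="train")  # uses spike_length_train and the train/ sub-folders
test_set = build_dataset_cfg(cfg)
test_set.build_source(split="test")    # same config object, test split
print(len(train_set), len(test_set))
```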
@@ -4,21 +4,19 @@ from dataclasses import dataclass
 
 
  @dataclass
- class RealWorldConfig(BaseDatasetConfig):
-     dataset_name: str = "realworld"
-     root_dir: Path = Path(__file__).parent.parent / Path("data/recVidarReal2019")
+ class RealDataConfig(BaseDatasetConfig):
+     dataset_name: str = "realdata"
+     root_dir: Path = Path(__file__).parent.parent / Path("data/realdata")
      width: int = 400
      height: int = 250
      with_img: bool = False
      spike_length_train: int = -1
      spike_length_test: int = -1
      rate: float = 1
-
 
-
- class RealWorld(BaseDataset):
+ class RealData(BaseDataset):
      def __init__(self, cfg: BaseDatasetConfig):
-         super(RealWorld, self).__init__(cfg)
+         super(RealData, self).__init__(cfg)
 
      def prepare_data(self):
          self.spike_dir = self.cfg.root_dir
@@ -8,7 +8,7 @@ import re
  @dataclass
  class REDS_BASEConfig(BaseDatasetConfig):
      dataset_name: str = "reds_base"
-     root_dir: Path = Path(__file__).parent.parent / Path("data/REDS_BASE")
+     root_dir: Path = Path(__file__).parent.parent / Path("data/reds_base")
      width: int = 400
      height: int = 250
      with_img: bool = True
@@ -9,7 +9,7 @@ import numpy as np
  @dataclass
  class SZDataConfig(BaseDatasetConfig):
      dataset_name: str = "szdata"
-     root_dir: Path = Path(__file__).parent.parent / Path("data/dataset")
+     root_dir: Path = Path(__file__).parent.parent / Path("data/szdata")
      width: int = 400
      height: int = 250
      with_img: bool = True
@@ -7,7 +7,7 @@ import torch
  @dataclass
  class UHSRConfig(BaseDatasetConfig):
      dataset_name: str = "uhsr"
-     root_dir: Path = Path(__file__).parent.parent / Path("data/U-CALTECH")
+     root_dir: Path = Path(__file__).parent.parent / Path("data/u_caltech")
      width: int = 224
      height: int = 224
      with_img: bool = False
@@ -0,0 +1,23 @@
+ from torch.utils.data import Dataset
+ from pathlib import Path
+ from dataclasses import dataclass
+ from typing import Literal, Union
+ from typing import Optional
+ from spikezoo.datasets.base_dataset import BaseDatasetConfig,BaseDataset
+
+ @dataclass
+ class YourDatasetConfig(BaseDatasetConfig):
+     dataset_name: str = "yourdataset"
+     root_dir: Union[str, Path] = Path(__file__).parent.parent / Path("data/your_data_path")
+     width: int = 400
+     height: int = 250
+     with_img: bool = True
+     spike_length_train: int = -1
+     spike_length_test: int = -1
+     spike_dir_name: str = "spike_data"
+     img_dir_name: str = "sharp_data"
+     rate: float = 1
+
+ class YourDataset(BaseDataset):
+     def __init__(self, cfg: BaseDatasetConfig):
+         super(YourDataset, self).__init__(cfg)
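Given the `prepare_data` logic inherited from `BaseDataset` (`root_dir / split / spike_dir_name`), the directory layout expected by this template is presumably the one sketched below; the tree and file extensions are assumptions based on the bundled sample data, and only the config defaults above are taken from the diff.

```python
# spikezoo/data/your_data_path/        <- root_dir
# ├── train/
# │   ├── spike_data/    *.dat spike streams
# │   └── sharp_data/    *.png ground-truth images
# └── test/
#     ├── spike_data/
#     └── sharp_data/
from spikezoo.datasets.yourdataset_dataset import YourDataset, YourDatasetConfig

dataset = YourDataset(YourDatasetConfig(root_dir="spikezoo/data/your_data_path"))
dataset.build_source(split="train")
```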
@@ -1,16 +1,6 @@
  import importlib
  import inspect
  from spikezoo.models.base_model import BaseModel,BaseModelConfig
- from spikezoo.models.tfp_model import TFPModel,TFPConfig
- from spikezoo.models.tfi_model import TFIModel,TFIConfig
- from spikezoo.models.spk2imgnet_model import Spk2ImgNet,Spk2ImgNetConfig
- from spikezoo.models.wgse_model import WGSE,WGSEConfig
- from spikezoo.models.ssml_model import SSML,SSMLConfig
- from spikezoo.models.bsf_model import BSF,BSFConfig
- from spikezoo.models.stir_model import STIR,STIRConfig
- from spikezoo.models.ssir_model import SSIR,SSIRConfig
- from spikezoo.models.spikeclip_model import SpikeCLIP,SpikeCLIPConfig
-
 
  from spikezoo.utils.other_utils import getattr_case_insensitive
  import os
@@ -24,14 +14,17 @@ model_list = [file.split("_")[0] for file in files_list if file.endswith("_model
  def build_model_cfg(cfg: BaseModelConfig):
      """Build the model from the given model config."""
      # model module name
-     module_name = cfg.model_name + "_model"
-     assert cfg.model_name in model_list, f"Given model {cfg.model_name} not in our model zoo {model_list}."
-     module_name = "spikezoo.models." + module_name
-     module = importlib.import_module(module_name)
-     # model,model_config
-     model_name = cfg.model_name
-     model_name = model_name + 'Model' if model_name == "base" else model_name
-     model_cls: BaseModel = getattr_case_insensitive(module,model_name)
+     if cfg.model_cls_local == None:
+         module_name = cfg.model_name + "_model"
+         assert cfg.model_name in model_list, f"Given model {cfg.model_name} not in our model zoo {model_list}."
+         module_name = "spikezoo.models." + module_name
+         module = importlib.import_module(module_name)
+         # model,model_config
+         model_name = cfg.model_name
+         model_name = model_name + 'Model' if model_name == "base" else model_name
+         model_cls: BaseModel = getattr_case_insensitive(module,model_name)
+     else:
+         model_cls: BaseModel = cfg.model_cls_local
      model = model_cls(cfg)
      return model
 
@@ -44,7 +44,10 @@ class BaseModelConfig:
      multi_gpu: bool = False
      "Base url."
      base_url: str = "https://github.com/chenkang455/Spike-Zoo/releases/download"
-
+     "Load the model from local class or spikezoo lib. (None)"
+     model_cls_local: Optional[nn.Module] = None
+     "Load the arch from local class or spikezoo lib. (None)"
+     arch_cls_local: Optional[nn.Module] = None
 
 
  class BaseModel(nn.Module):
@@ -71,8 +74,11 @@ class BaseModel(nn.Module):
      ):
          """Build the network and load the pretrained weight."""
          # network
-         module = importlib.import_module(f"spikezoo.archs.{self.cfg.model_name}.{self.cfg.model_file_name}")
-         model_cls = getattr(module, self.cfg.model_cls_name)
+         if self.cfg.arch_cls_local == None:
+             module = importlib.import_module(f"spikezoo.archs.{self.cfg.model_name}.{self.cfg.model_file_name}")
+             model_cls = getattr(module, self.cfg.model_cls_name)
+         else:
+             model_cls = self.cfg.arch_cls_local
          # load model config parameters
          if version == "local":
              model = model_cls(**self.cfg.model_params)
@@ -129,7 +135,7 @@ class BaseModel(nn.Module):
          """Crop the spike length."""
          spike_length = spike.shape[1]
          spike_mid = spike_length // 2
-         assert spike_length >= self.model_length, f"Spike input is not long enough, given {spike_length} frames < {self.cfg.model_length}."
+         assert spike_length >= self.model_length, f"Spike input is not long enough, given {spike_length} frames < {self.cfg.model_length} required by the {self.cfg.model_name}."
          # even length
          if self.model_length == self.model_half_length * 2:
              spike = spike[
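For illustration, a hedged sketch of the two new local-override hooks working together: `model_cls_local` lets `build_model_cfg` skip the `spikezoo.models.*` lookup, and `arch_cls_local` lets `build_network` skip the `spikezoo.archs.*` import. `MyModel` and `MyModelConfig` are hypothetical local classes; `YourNet` is the arch added earlier in this diff.

```python
from dataclasses import dataclass, field
from spikezoo.models import build_model_cfg
from spikezoo.models.base_model import BaseModel, BaseModelConfig
from spikezoo.archs.yourmodel.arch.net import YourNet

class MyModel(BaseModel):  # hypothetical local model wrapper
    def __init__(self, cfg):
        super().__init__(cfg)

@dataclass
class MyModelConfig(BaseModelConfig):
    model_name: str = "mymodel"          # no mymodel_model.py needs to exist
    model_cls_local: type = MyModel      # build_model_cfg skips the spikezoo.models.* lookup
    arch_cls_local: type = YourNet       # build_network skips the spikezoo.archs.* import
    model_params: dict = field(default_factory=lambda: {"inDim": 41})

model = build_model_cfg(MyModelConfig())  # instantiates MyModel(cfg) directly
```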
@@ -0,0 +1,22 @@
+ from torch.utils.data import Dataset
+ from pathlib import Path
+ from dataclasses import dataclass
+ from typing import Literal, Union
+ from typing import Optional
+ from spikezoo.models.base_model import BaseModel, BaseModelConfig
+ from dataclasses import field
+ import torch.nn as nn
+
+
+ @dataclass
+ class YourModelConfig(BaseModelConfig):
+     model_name: str = "yourmodel"  # must be consistent with the file name
+     model_file_name: str = "arch.net"  # module path under the archs directory
+     model_cls_name: str = "YourNet"  # model class name
+     model_length: int = 41
+     require_params: bool = True
+     model_params: dict = field(default_factory=lambda: {"inDim": 41})
+
+ class YourModel(BaseModel):
+     def __init__(self, cfg: BaseModelConfig):
+         super(YourModel, self).__init__(cfg)
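Putting the two template files together, a hedged end-to-end sketch of training the custom model on the custom dataset, mirroring the `TrainPipeline` call from the README above; the data path is a placeholder.

```python
from spikezoo.pipeline import TrainPipeline, TrainPipelineConfig
from spikezoo.models.yourmodel_model import YourModelConfig
from spikezoo.datasets.yourdataset_dataset import YourDatasetConfig

pipeline = TrainPipeline(
    cfg=TrainPipelineConfig(save_folder="results", epochs=10),
    model_cfg=YourModelConfig(),
    dataset_cfg=YourDatasetConfig(root_dir="spikezoo/data/your_data_path"),
)
pipeline.train()
```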