torch-rechub 0.0.4.tar.gz → 0.0.5.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (205)
  1. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/release.yml +4 -12
  2. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/workflows/ci.yml +39 -11
  3. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/CHANGELOG.md +15 -0
  4. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/PKG-INFO +56 -45
  5. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/README.md +52 -44
  6. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/pyproject.toml +5 -1
  7. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/trainers/ctr_trainer.py +97 -0
  8. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/trainers/match_trainer.py +97 -0
  9. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/trainers/mtl_trainer.py +97 -0
  10. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/trainers/seq_trainer.py +134 -0
  11. torch_rechub-0.0.5/torch_rechub/utils/model_utils.py +233 -0
  12. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/utils/onnx_export.py +3 -136
  13. torch_rechub-0.0.5/torch_rechub/utils/visualization.py +271 -0
  14. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  15. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/ISSUE_TEMPLATE/config.yml +0 -0
  16. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  17. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/ISSUE_TEMPLATE/help_wanted.md +0 -0
  18. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/dependabot.yml +0 -0
  19. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/pull_request_template.md +0 -0
  20. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.github/workflows/deploy.yml +0 -0
  21. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.gitignore +0 -0
  22. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/.pre-commit-config.yaml +0 -0
  23. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/CODE_OF_CONDUCT.md +0 -0
  24. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/CONTRIBUTING.md +0 -0
  25. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/LICENSE +0 -0
  26. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/README_en.md +0 -0
  27. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/config/.flake8 +0 -0
  28. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/config/.pep8 +0 -0
  29. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/config/.pre-commit-config.yaml +0 -0
  30. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/config/CONFIG_GUIDE.md +0 -0
  31. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/config/fix_encoding.py +0 -0
  32. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/config/format_code.py +0 -0
  33. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/config/pytest.ini +0 -0
  34. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/.vitepress/config.mts +0 -0
  35. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/.vitepress/theme/custom.css +0 -0
  36. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/.vitepress/theme/index.ts +0 -0
  37. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/.vitepress/theme/style.css +0 -0
  38. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/blog/hllm_reproduction.md +0 -0
  39. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/blog/match.md +0 -0
  40. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/blog/rank.md +0 -0
  41. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/contributing.md +0 -0
  42. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/index.md +0 -0
  43. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/introduction.md +0 -0
  44. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/api-reference/basic.md +0 -0
  45. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/api-reference/models.md +0 -0
  46. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/api-reference/trainers.md +0 -0
  47. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/api-reference/utils.md +0 -0
  48. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/faq.md +0 -0
  49. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/getting-started.md +0 -0
  50. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/installation.md +0 -0
  51. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/tutorials/matching.md +0 -0
  52. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/tutorials/multi-task.md +0 -0
  53. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/en/manual/tutorials/ranking.md +0 -0
  54. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/favicon.ico +0 -0
  55. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/img/banner.png +0 -0
  56. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/img/logo.png +0 -0
  57. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/img/logo_with_name.png +0 -0
  58. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/img/project_framework.jpg +0 -0
  59. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/img/win_install_annoy_error.png +0 -0
  60. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1606.07792_l8JrVnuYXA.pdf +0 -0
  61. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1703.04247_sFSyE7q3U1.pdf +0 -0
  62. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1706.06978_0xZD_K10S2.pdf +0 -0
  63. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1708.05123_f3lKSqxIvw.pdf +0 -0
  64. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1711.00165_eosOSOmTfE.pdf +0 -0
  65. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1804.07931_ybf_jOAFRp.pdf +0 -0
  66. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1808.09781-3_bmRm284Rxd.pdf +0 -0
  67. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1808.09781v1.pdf +0 -0
  68. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/1905.06336_2oH3RMtROA.pdf +0 -0
  69. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/2006.11632_qiN67CrHNs.pdf +0 -0
  70. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/2020 (Tencent) (Recsys) [PLE] Progressive Layered .pdf +0 -0
  71. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/2102.09267_cdwBFKPCrj.pdf +0 -0
  72. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/2105.08489-2_XnVVGxN9GG.pdf +0 -0
  73. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/2203.06801v1-3_qUTY4TbvSL.pdf +0 -0
  74. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/2959100.2959190_jRzTU81Xmq.pdf +0 -0
  75. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/3219819.3219950_aTMFXHL3JB.pdf +0 -0
  76. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/3219819.3220007_zvaZg_CZ6z.pdf +0 -0
  77. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/4545-Article Text-7584-1-10-20190706.pdf +0 -0
  78. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/6c8a86c981a62b0126a11896b7f6ae0dae4c3566_1QYYhqJR8.pdf +0 -0
  79. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/Caruana1997_Article_MultitaskLearning_ySprcjzJ6v.pdf +0 -0
  80. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/DCN V2 Improved Deep & Cross Network and Practical.pdf +0 -0
  81. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/public/pdf/cikm2013_DSSM_fullversion_c9ZSdM19XJ.pdf +0 -0
  82. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/blog/hllm_reproduction.md +0 -0
  83. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/blog/hstu_reproduction.md +0 -0
  84. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/blog/match.md +0 -0
  85. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/blog/rank.md +0 -0
  86. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/contributing.md +0 -0
  87. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/index.md +0 -0
  88. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/introduction.md +0 -0
  89. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/api-reference/basic.md +0 -0
  90. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/api-reference/models.md +0 -0
  91. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/api-reference/trainers.md +0 -0
  92. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/api-reference/utils.md +0 -0
  93. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/faq.md +0 -0
  94. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/getting-started.md +0 -0
  95. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/installation.md +0 -0
  96. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/tutorials/matching.md +0 -0
  97. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/tutorials/multi-task.md +0 -0
  98. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/manual/tutorials/ranking.md +0 -0
  99. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/docs/zh/参考资料/参考资料.md +0 -0
  100. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/data/amazon-books/README.md +0 -0
  101. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/data/amazon-books/preprocess_amazon_books.py +0 -0
  102. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/data/amazon-books/preprocess_amazon_books_hllm.py +0 -0
  103. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/data/ml-1m/README +0 -0
  104. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/data/ml-1m/preprocess_hllm_data.py +0 -0
  105. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/data/ml-1m/preprocess_ml_hstu.py +0 -0
  106. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/run_hllm_amazon_books.py +0 -0
  107. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/run_hllm_movielens.py +0 -0
  108. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/generative/run_hstu_movielens.py +0 -0
  109. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/README.md +0 -0
  110. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/data/million-song-dataset/process_msd.py +0 -0
  111. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/data/ml-1m/preprocess_ml.py +0 -0
  112. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/data/session_based/preprocess_session_based.py +0 -0
  113. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/data/yidian_news/preprocess.py +0 -0
  114. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/movielens_utils.py +0 -0
  115. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_ml_comirec.py +0 -0
  116. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_ml_dssm.py +0 -0
  117. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_ml_facebook_dssm.py +0 -0
  118. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_ml_gru4rec.py +0 -0
  119. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_ml_mind.py +0 -0
  120. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_ml_sine.py +0 -0
  121. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_ml_youtube_dnn.py +0 -0
  122. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_ml_youtube_sbc.py +0 -0
  123. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/matching/run_sbr.py +0 -0
  124. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/README.md +0 -0
  125. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/data/ali-ccp/preprocess_ali_ccp.py +0 -0
  126. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/data/amazon-beauty/preprocess_amazon_beauty.py +0 -0
  127. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/data/amazon-books/preprocess_amazon_books.py +0 -0
  128. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/data/amazon-electronics/preprocess_amazon_electronics.py +0 -0
  129. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/data/avazu/download_avazu.py +0 -0
  130. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/data/census-income/preprocess_census.py +0 -0
  131. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_ali_ccp_ctr_ranking.py +0 -0
  132. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_ali_ccp_multi_task.py +0 -0
  133. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_aliexpress.py +0 -0
  134. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_amazon_electronics.py +0 -0
  135. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_avazu.py +0 -0
  136. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_census.py +0 -0
  137. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_criteo.py +0 -0
  138. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_gradnorm.py +0 -0
  139. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/examples/ranking/run_metabalance.py +0 -0
  140. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/package-lock.json +0 -0
  141. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/package.json +0 -0
  142. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tests/test_e2e_matching.py +0 -0
  143. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tests/test_e2e_multitask.py +0 -0
  144. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tests/test_e2e_ranking.py +0 -0
  145. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tests/test_onnx_export.py +0 -0
  146. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tests/test_regularization.py +0 -0
  147. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/__init__.py +0 -0
  148. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/__init__.py +0 -0
  149. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/activation.py +0 -0
  150. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/callback.py +0 -0
  151. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/features.py +0 -0
  152. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/initializers.py +0 -0
  153. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/layers.py +0 -0
  154. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/loss_func.py +0 -0
  155. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/metaoptimizer.py +0 -0
  156. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/basic/metric.py +0 -0
  157. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/__init__.py +0 -0
  158. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/generative/__init__.py +0 -0
  159. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/generative/hllm.py +0 -0
  160. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/generative/hstu.py +0 -0
  161. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/__init__.py +0 -0
  162. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/comirec.py +0 -0
  163. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/dssm.py +0 -0
  164. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/dssm_facebook.py +0 -0
  165. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/dssm_senet.py +0 -0
  166. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/gru4rec.py +0 -0
  167. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/mind.py +0 -0
  168. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/narm.py +0 -0
  169. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/sasrec.py +0 -0
  170. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/sine.py +0 -0
  171. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/stamp.py +0 -0
  172. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/youtube_dnn.py +0 -0
  173. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/matching/youtube_sbc.py +0 -0
  174. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/multi_task/__init__.py +0 -0
  175. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/multi_task/aitm.py +0 -0
  176. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/multi_task/esmm.py +0 -0
  177. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/multi_task/mmoe.py +0 -0
  178. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/multi_task/ple.py +0 -0
  179. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/multi_task/shared_bottom.py +0 -0
  180. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/__init__.py +0 -0
  181. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/afm.py +0 -0
  182. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/autoint.py +0 -0
  183. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/bst.py +0 -0
  184. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/dcn.py +0 -0
  185. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/dcn_v2.py +0 -0
  186. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/deepffm.py +0 -0
  187. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/deepfm.py +0 -0
  188. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/dien.py +0 -0
  189. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/din.py +0 -0
  190. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/edcn.py +0 -0
  191. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/fibinet.py +0 -0
  192. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/models/ranking/widedeep.py +0 -0
  193. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/trainers/__init__.py +0 -0
  194. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/trainers/matching.md +0 -0
  195. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/utils/__init__.py +0 -0
  196. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/utils/data.py +0 -0
  197. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/utils/hstu_utils.py +0 -0
  198. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/utils/match.py +0 -0
  199. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/torch_rechub/utils/mtl.py +0 -0
  200. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tutorials/DIN.ipynb +0 -0
  201. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tutorials/DeepFM.ipynb +0 -0
  202. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tutorials/Matching.ipynb +0 -0
  203. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tutorials/Milvus.ipynb +0 -0
  204. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/tutorials/Multi_Task.ipynb +0 -0
  205. {torch_rechub-0.0.4 → torch_rechub-0.0.5}/uv.lock +0 -0
--- torch_rechub-0.0.4/.github/release.yml
+++ torch_rechub-0.0.5/.github/release.yml
@@ -8,6 +8,7 @@ changelog:
  - ignore-for-release
  - duplicate
  - invalid
+ - wontfix
  authors:
  - dependabot
  - dependabot[bot]
@@ -16,30 +17,22 @@ changelog:
  - title: "✨ 新特性 / Features"
  labels:
  - enhancement
- - feature
- - feat

  - title: "🐛 Bug 修复 / Bug Fixes"
  labels:
  - bug
- - fix
- - bugfix

  - title: "⚡ 性能优化 / Performance"
  labels:
  - performance
- - perf

  - title: "📝 文档更新 / Documentation"
  labels:
  - documentation
- - docs

- - title: "🔧 维护更新 / Maintenance"
+ - title: "🔧 模型更新 / Models"
  labels:
- - maintenance
- - chore
- - refactor
+ - model

  - title: "📦 依赖更新 / Dependencies"
  labels:
@@ -47,5 +40,4 @@ changelog:

  - title: "🔄 其他变更 / Other Changes"
  labels:
- - "*"
-
+ - "*"
--- torch_rechub-0.0.4/.github/workflows/ci.yml
+++ torch_rechub-0.0.5/.github/workflows/ci.yml
@@ -1,8 +1,9 @@
  # ===================================================================
  # CI/CD 流程配置 - 代码质量检查、测试、构建、发布
  # ===================================================================
- # 这个workflow在代码文件变更时触发,运行完整的CI/CD流程
- # 排除docs目录和markdown文件的变更
+ # 触发条件:
+ # - push/pull_request: 运行完整 CI 检查(lint, test, security, build)
+ # - release: 仅运行发布流程(跳过已执行的检查)

  name: CI/CD Pipeline

@@ -37,11 +38,13 @@ env:

  jobs:
  # ===================================================================
- # 代码质量检查
+ # 代码质量检查 (仅在 push/PR 时运行,release 时跳过)
  # ===================================================================
  lint:
  name: Code Quality Checks
  runs-on: ubuntu-latest
+ # 跳过 release 事件,因为代码已在合并时检查过
+ if: github.event_name != 'release'

  steps:
  - name: Checkout code
@@ -88,11 +91,13 @@ jobs:

  # ===================================================================
  # 完整测试 (Python 3.9) - 运行所有测试和覆盖率报告
+ # (仅在 push/PR 时运行,release 时跳过)
  # ===================================================================
  test:
  name: Full Test Suite (Python 3.9)
  runs-on: ${{ matrix.os }}
  needs: lint
+ if: github.event_name != 'release'

  strategy:
  fail-fast: false
@@ -152,11 +157,13 @@ jobs:

  # ===================================================================
  # 依赖兼容性验证 (Python 3.10+) - 仅验证依赖安装成功
+ # (仅在 push/PR 时运行,release 时跳过)
  # ===================================================================
  compatibility:
  name: Dependency Check (Python ${{ matrix.python-version }})
  runs-on: ubuntu-latest
  needs: lint
+ if: github.event_name != 'release'

  strategy:
  fail-fast: false
@@ -186,12 +193,13 @@ jobs:
  python -c "import onnx; import onnxruntime; print('ONNX dependencies OK')"

  # ===================================================================
- # 安全检查
+ # 安全检查 (仅在 push/PR 时运行,release 时跳过)
  # ===================================================================
  security:
  name: Security Scan
  runs-on: ubuntu-latest
  needs: lint
+ if: github.event_name != 'release'

  steps:
  - name: Checkout code
@@ -220,12 +228,13 @@ jobs:
  path: bandit-report.json

  # ===================================================================
- # 构建检查
+ # 构建检查 (仅在 push/PR 时运行,release 时跳过)
  # ===================================================================
  build:
  name: Build Package
  runs-on: ubuntu-latest
  needs: [test, compatibility, security]
+ if: github.event_name != 'release'

  steps:
  - name: Checkout code
@@ -256,18 +265,23 @@ jobs:
  path: dist/

  # ===================================================================
- # 自动发布到PyPI (使用 uv)
- # 功能:从 GitHub Release 自动同步版本号、更新 CHANGELOG、发布到 PyPI
+ # 自动发布到 PyPI 和 GitHub Release (使用 uv)
+ # 功能:
+ # - 从 GitHub Release 自动同步版本号
+ # - 更新 CHANGELOG.md
+ # - 构建并发布到 PyPI
+ # - 上传构建产物到 GitHub Release 页面
+ # 注意:此 job 仅在 release 事件时运行,不依赖其他 job(代码已在合并时检查过)
  # ===================================================================
  publish:
- name: Publish to PyPI
+ name: Publish to PyPI & GitHub Release
  runs-on: ubuntu-latest
- needs: build
+ # 不再依赖 build job,直接运行(代码质量已在 PR 合并时验证)
  if: github.event_name == 'release' && github.event.action == 'published'
  environment: pypi
  permissions:
  id-token: write # Required for trusted publishing
- contents: write # Required for pushing changes back to repo
+ contents: write # Required for pushing changes and uploading release assets

  steps:
  - name: Checkout code
@@ -342,14 +356,28 @@ jobs:
  run: uv python install ${{ env.PYTHON_VERSION }}

  - name: Build package with uv
+ id: build
  run: |
  uv build
  echo "✅ Package built successfully"
  ls -la dist/
+ # 输出构建产物文件名供后续步骤使用
+ echo "WHEEL_FILE=$(ls dist/*.whl)" >> $GITHUB_OUTPUT
+ echo "SDIST_FILE=$(ls dist/*.tar.gz)" >> $GITHUB_OUTPUT

  - name: Publish to PyPI
  env:
  UV_PUBLISH_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
  run: |
  uv publish
- echo "🚀 Published to PyPI successfully!"
+ echo "🚀 Published to PyPI successfully!"
+
+ - name: Upload release assets to GitHub Release
+ uses: softprops/action-gh-release@v2
+ with:
+ files: |
+ dist/*.whl
+ dist/*.tar.gz
+ fail_on_unmatched_files: true
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
--- torch_rechub-0.0.4/CHANGELOG.md
+++ torch_rechub-0.0.5/CHANGELOG.md
@@ -7,6 +7,21 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

  ---

+ ## [0.0.5] - 2025-12-05
+
+ <!-- Release notes generated using configuration in .github/release.yml at main -->
+
+ ## What's Changed
+ ### ✨ 新特性 / Features
+ * Add torchview to Support Model Visualization && Update CI/CD and release workflows by @1985312383 in https://github.com/datawhalechina/torch-rechub/pull/141
+
+
+ **Full Changelog**: https://github.com/datawhalechina/torch-rechub/compare/v0.0.4...v0.0.5
+
+ ---
+
+
+
  ## [0.0.4] - 2025-12-04

  <!-- Release notes generated using configuration in .github/release.yml at main -->
--- torch_rechub-0.0.4/PKG-INFO
+++ torch_rechub-0.0.5/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: torch-rechub
- Version: 0.0.4
+ Version: 0.0.5
  Summary: A Pytorch Toolbox for Recommendation Models, Easy-to-use and Easy-to-extend.
  Project-URL: Homepage, https://github.com/datawhalechina/torch-rechub
  Project-URL: Documentation, https://www.torch-rechub.com
@@ -41,6 +41,9 @@ Requires-Dist: yapf==0.43.0; extra == 'dev'
  Provides-Extra: onnx
  Requires-Dist: onnx>=1.12.0; extra == 'onnx'
  Requires-Dist: onnxruntime>=1.12.0; extra == 'onnx'
+ Provides-Extra: visualization
+ Requires-Dist: graphviz>=0.20; extra == 'visualization'
+ Requires-Dist: torchview>=0.2.6; extra == 'visualization'
  Description-Content-Type: text/markdown

  # 🔥 Torch-RecHub - 轻量、高效、易用的 PyTorch 推荐系统框架
@@ -69,13 +72,13 @@ Description-Content-Type: text/markdown

  ## 🎯 为什么选择 Torch-RecHub?

- | 特性 | Torch-RecHub | 其他框架 |
- |------|-------------|---------|
- | 代码行数 | **10行** 完成训练+评估+部署 | 100+ 行 |
- | 模型覆盖 | **30+** 主流模型 | 有限 |
- | 生成式推荐 | ✅ HSTU/HLLM (Meta 2024) | ❌ |
- | ONNX 一键导出 | ✅ 内置支持 | 需手动适配 |
- | 学习曲线 | 极低 | 陡峭 |
+ | 特性 | Torch-RecHub | 其他框架 |
+ | ------------- | --------------------------- | ---------- |
+ | 代码行数 | **10行** 完成训练+评估+部署 | 100+ 行 |
+ | 模型覆盖 | **30+** 主流模型 | 有限 |
+ | 生成式推荐 | ✅ HSTU/HLLM (Meta 2024) | ❌ |
+ | ONNX 一键导出 | ✅ 内置支持 | 需手动适配 |
+ | 学习曲线 | 极低 | 陡峭 |

  ## ✨ 特性

@@ -205,52 +208,52 @@ torch-rechub/ # 根目录

  ### 排序模型 (Ranking Models) - 13个

- | 模型 | 论文 | 简介 |
- |------|------|------|
- | **DeepFM** | [IJCAI 2017](https://arxiv.org/abs/1703.04247) | FM + Deep 联合训练 |
- | **Wide&Deep** | [DLRS 2016](https://arxiv.org/abs/1606.07792) | 记忆 + 泛化能力结合 |
- | **DCN** | [KDD 2017](https://arxiv.org/abs/1708.05123) | 显式特征交叉网络 |
- | **DCN-v2** | [WWW 2021](https://arxiv.org/abs/2008.13535) | 增强版交叉网络 |
- | **DIN** | [KDD 2018](https://arxiv.org/abs/1706.06978) | 注意力机制捕捉用户兴趣 |
- | **DIEN** | [AAAI 2019](https://arxiv.org/abs/1809.03672) | 兴趣演化建模 |
- | **BST** | [DLP-KDD 2019](https://arxiv.org/abs/1905.06874) | Transformer 序列建模 |
- | **AFM** | [IJCAI 2017](https://arxiv.org/abs/1708.04617) | 注意力因子分解机 |
- | **AutoInt** | [CIKM 2019](https://arxiv.org/abs/1810.11921) | 自动特征交互学习 |
- | **FiBiNET** | [RecSys 2019](https://arxiv.org/abs/1905.09433) | 特征重要性 + 双线性交互 |
- | **DeepFFM** | [RecSys 2019](https://arxiv.org/abs/1611.00144) | 场感知因子分解机 |
- | **EDCN** | [KDD 2021](https://arxiv.org/abs/2106.03032) | 增强型交叉网络 |
+ | 模型 | 论文 | 简介 |
+ | ------------- | ------------------------------------------------ | ----------------------- |
+ | **DeepFM** | [IJCAI 2017](https://arxiv.org/abs/1703.04247) | FM + Deep 联合训练 |
+ | **Wide&Deep** | [DLRS 2016](https://arxiv.org/abs/1606.07792) | 记忆 + 泛化能力结合 |
+ | **DCN** | [KDD 2017](https://arxiv.org/abs/1708.05123) | 显式特征交叉网络 |
+ | **DCN-v2** | [WWW 2021](https://arxiv.org/abs/2008.13535) | 增强版交叉网络 |
+ | **DIN** | [KDD 2018](https://arxiv.org/abs/1706.06978) | 注意力机制捕捉用户兴趣 |
+ | **DIEN** | [AAAI 2019](https://arxiv.org/abs/1809.03672) | 兴趣演化建模 |
+ | **BST** | [DLP-KDD 2019](https://arxiv.org/abs/1905.06874) | Transformer 序列建模 |
+ | **AFM** | [IJCAI 2017](https://arxiv.org/abs/1708.04617) | 注意力因子分解机 |
+ | **AutoInt** | [CIKM 2019](https://arxiv.org/abs/1810.11921) | 自动特征交互学习 |
+ | **FiBiNET** | [RecSys 2019](https://arxiv.org/abs/1905.09433) | 特征重要性 + 双线性交互 |
+ | **DeepFFM** | [RecSys 2019](https://arxiv.org/abs/1611.00144) | 场感知因子分解机 |
+ | **EDCN** | [KDD 2021](https://arxiv.org/abs/2106.03032) | 增强型交叉网络 |

  ### 召回模型 (Matching Models) - 12个

- | 模型 | 论文 | 简介 |
- |------|------|------|
- | **DSSM** | [CIKM 2013](https://posenhuang.github.io/papers/cikm2013_DSSM_fullversion.pdf) | 经典双塔召回模型 |
- | **YoutubeDNN** | [RecSys 2016](https://dl.acm.org/doi/10.1145/2959100.2959190) | YouTube 深度召回 |
- | **YoutubeSBC** | [RecSys 2019](https://dl.acm.org/doi/10.1145/3298689.3346997) | 采样偏差校正版本 |
- | **MIND** | [CIKM 2019](https://arxiv.org/abs/1904.08030) | 多兴趣动态路由 |
- | **SINE** | [WSDM 2021](https://arxiv.org/abs/2103.06920) | 稀疏兴趣网络 |
- | **GRU4Rec** | [ICLR 2016](https://arxiv.org/abs/1511.06939) | GRU 序列推荐 |
- | **SASRec** | [ICDM 2018](https://arxiv.org/abs/1808.09781) | 自注意力序列推荐 |
- | **NARM** | [CIKM 2017](https://arxiv.org/abs/1711.04725) | 神经注意力会话推荐 |
- | **STAMP** | [KDD 2018](https://dl.acm.org/doi/10.1145/3219819.3219895) | 短期注意力记忆优先 |
- | **ComiRec** | [KDD 2020](https://arxiv.org/abs/2005.09347) | 可控多兴趣推荐 |
+ | 模型 | 论文 | 简介 |
+ | -------------- | ------------------------------------------------------------------------------ | ------------------ |
+ | **DSSM** | [CIKM 2013](https://posenhuang.github.io/papers/cikm2013_DSSM_fullversion.pdf) | 经典双塔召回模型 |
+ | **YoutubeDNN** | [RecSys 2016](https://dl.acm.org/doi/10.1145/2959100.2959190) | YouTube 深度召回 |
+ | **YoutubeSBC** | [RecSys 2019](https://dl.acm.org/doi/10.1145/3298689.3346997) | 采样偏差校正版本 |
+ | **MIND** | [CIKM 2019](https://arxiv.org/abs/1904.08030) | 多兴趣动态路由 |
+ | **SINE** | [WSDM 2021](https://arxiv.org/abs/2103.06920) | 稀疏兴趣网络 |
+ | **GRU4Rec** | [ICLR 2016](https://arxiv.org/abs/1511.06939) | GRU 序列推荐 |
+ | **SASRec** | [ICDM 2018](https://arxiv.org/abs/1808.09781) | 自注意力序列推荐 |
+ | **NARM** | [CIKM 2017](https://arxiv.org/abs/1711.04725) | 神经注意力会话推荐 |
+ | **STAMP** | [KDD 2018](https://dl.acm.org/doi/10.1145/3219819.3219895) | 短期注意力记忆优先 |
+ | **ComiRec** | [KDD 2020](https://arxiv.org/abs/2005.09347) | 可控多兴趣推荐 |

  ### 多任务模型 (Multi-Task Models) - 5个

- | 模型 | 论文 | 简介 |
- |------|------|------|
- | **ESMM** | [SIGIR 2018](https://arxiv.org/abs/1804.07931) | 全空间多任务建模 |
- | **MMoE** | [KDD 2018](https://dl.acm.org/doi/10.1145/3219819.3220007) | 多门控专家混合 |
- | **PLE** | [RecSys 2020](https://dl.acm.org/doi/10.1145/3383313.3412236) | 渐进式分层提取 |
- | **AITM** | [KDD 2021](https://arxiv.org/abs/2105.08489) | 自适应信息迁移 |
- | **SharedBottom** | - | 经典多任务共享底层 |
+ | 模型 | 论文 | 简介 |
+ | ---------------- | ------------------------------------------------------------- | ------------------ |
+ | **ESMM** | [SIGIR 2018](https://arxiv.org/abs/1804.07931) | 全空间多任务建模 |
+ | **MMoE** | [KDD 2018](https://dl.acm.org/doi/10.1145/3219819.3220007) | 多门控专家混合 |
+ | **PLE** | [RecSys 2020](https://dl.acm.org/doi/10.1145/3383313.3412236) | 渐进式分层提取 |
+ | **AITM** | [KDD 2021](https://arxiv.org/abs/2105.08489) | 自适应信息迁移 |
+ | **SharedBottom** | - | 经典多任务共享底层 |

  ### 生成式推荐 (Generative Recommendation) - 2个

- | 模型 | 论文 | 简介 |
- |------|------|------|
+ | 模型 | 论文 | 简介 |
+ | -------- | --------------------------------------------- | -------------------------------------------- |
  | **HSTU** | [Meta 2024](https://arxiv.org/abs/2402.17152) | 层级序列转换单元,支撑 Meta 万亿参数推荐系统 |
- | **HLLM** | [2024](https://arxiv.org/abs/2409.12740) | 层级大语言模型推荐,融合 LLM 语义理解能力 |
+ | **HLLM** | [2024](https://arxiv.org/abs/2409.12740) | 层级大语言模型推荐,融合 LLM 语义理解能力 |

  ## 📊 支持的数据集

@@ -338,11 +341,19 @@ model = DSSM(user_features, item_features, temperature=0.02,
  match_trainer = MatchTrainer(model)
  match_trainer.fit(train_dl)
  match_trainer.export_onnx("dssm.onnx")
- # 双塔模型可分别导出用户塔和物品塔:
+ # 双塔模型可分别导出用户塔和物品塔:
  # match_trainer.export_onnx("user_tower.onnx", mode="user")
  # match_trainer.export_onnx("dssm_item.onnx", tower="item")
  ```

+ ### 模型可视化
+
+ ```python
+ # 可视化模型架构(需要安装: pip install torch-rechub[visualization])
+ graph = ctr_trainer.visualization(depth=4) # 生成计算图
+ ctr_trainer.visualization(save_path="model.pdf", dpi=300) # 保存为高清 PDF
+ ```
+
  ## 👨‍💻‍ 贡献者

  感谢所有的贡献者!
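The PKG-INFO hunk above declares a new `visualization` extra (torchview>=0.2.6, graphviz>=0.20). Note that the `graphviz` package only wraps the Graphviz binaries, which still need a system-level install; the trainer code later in this diff raises an ImportError with per-OS hints when they are missing. As a reference point, a minimal sketch of the optional-import guard implied by the `TORCHVIEW_AVAILABLE` flag used in `ctr_trainer.py` — the real `torch_rechub/utils/visualization.py` may differ:

```python
# Sketch of an optional-dependency guard for the new "visualization" extra.
# The TORCHVIEW_AVAILABLE name mirrors the flag imported in ctr_trainer.py;
# everything else here is illustrative, not the package's actual code.
try:
    import torchview  # installed via: pip install "torch-rechub[visualization]"
    TORCHVIEW_AVAILABLE = True
except ImportError:
    TORCHVIEW_AVAILABLE = False

if not TORCHVIEW_AVAILABLE:
    print("torchview/graphviz not installed; trainer.visualization() will raise ImportError")
```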
--- torch_rechub-0.0.4/README.md
+++ torch_rechub-0.0.5/README.md
@@ -24,13 +24,13 @@

  ## 🎯 为什么选择 Torch-RecHub?

- | 特性 | Torch-RecHub | 其他框架 |
- |------|-------------|---------|
- | 代码行数 | **10行** 完成训练+评估+部署 | 100+ 行 |
- | 模型覆盖 | **30+** 主流模型 | 有限 |
- | 生成式推荐 | ✅ HSTU/HLLM (Meta 2024) | ❌ |
- | ONNX 一键导出 | ✅ 内置支持 | 需手动适配 |
- | 学习曲线 | 极低 | 陡峭 |
+ | 特性 | Torch-RecHub | 其他框架 |
+ | ------------- | --------------------------- | ---------- |
+ | 代码行数 | **10行** 完成训练+评估+部署 | 100+ 行 |
+ | 模型覆盖 | **30+** 主流模型 | 有限 |
+ | 生成式推荐 | ✅ HSTU/HLLM (Meta 2024) | ❌ |
+ | ONNX 一键导出 | ✅ 内置支持 | 需手动适配 |
+ | 学习曲线 | 极低 | 陡峭 |

  ## ✨ 特性

@@ -160,52 +160,52 @@ torch-rechub/ # 根目录

  ### 排序模型 (Ranking Models) - 13个

- | 模型 | 论文 | 简介 |
- |------|------|------|
- | **DeepFM** | [IJCAI 2017](https://arxiv.org/abs/1703.04247) | FM + Deep 联合训练 |
- | **Wide&Deep** | [DLRS 2016](https://arxiv.org/abs/1606.07792) | 记忆 + 泛化能力结合 |
- | **DCN** | [KDD 2017](https://arxiv.org/abs/1708.05123) | 显式特征交叉网络 |
- | **DCN-v2** | [WWW 2021](https://arxiv.org/abs/2008.13535) | 增强版交叉网络 |
- | **DIN** | [KDD 2018](https://arxiv.org/abs/1706.06978) | 注意力机制捕捉用户兴趣 |
- | **DIEN** | [AAAI 2019](https://arxiv.org/abs/1809.03672) | 兴趣演化建模 |
- | **BST** | [DLP-KDD 2019](https://arxiv.org/abs/1905.06874) | Transformer 序列建模 |
- | **AFM** | [IJCAI 2017](https://arxiv.org/abs/1708.04617) | 注意力因子分解机 |
- | **AutoInt** | [CIKM 2019](https://arxiv.org/abs/1810.11921) | 自动特征交互学习 |
- | **FiBiNET** | [RecSys 2019](https://arxiv.org/abs/1905.09433) | 特征重要性 + 双线性交互 |
- | **DeepFFM** | [RecSys 2019](https://arxiv.org/abs/1611.00144) | 场感知因子分解机 |
- | **EDCN** | [KDD 2021](https://arxiv.org/abs/2106.03032) | 增强型交叉网络 |
+ | 模型 | 论文 | 简介 |
+ | ------------- | ------------------------------------------------ | ----------------------- |
+ | **DeepFM** | [IJCAI 2017](https://arxiv.org/abs/1703.04247) | FM + Deep 联合训练 |
+ | **Wide&Deep** | [DLRS 2016](https://arxiv.org/abs/1606.07792) | 记忆 + 泛化能力结合 |
+ | **DCN** | [KDD 2017](https://arxiv.org/abs/1708.05123) | 显式特征交叉网络 |
+ | **DCN-v2** | [WWW 2021](https://arxiv.org/abs/2008.13535) | 增强版交叉网络 |
+ | **DIN** | [KDD 2018](https://arxiv.org/abs/1706.06978) | 注意力机制捕捉用户兴趣 |
+ | **DIEN** | [AAAI 2019](https://arxiv.org/abs/1809.03672) | 兴趣演化建模 |
+ | **BST** | [DLP-KDD 2019](https://arxiv.org/abs/1905.06874) | Transformer 序列建模 |
+ | **AFM** | [IJCAI 2017](https://arxiv.org/abs/1708.04617) | 注意力因子分解机 |
+ | **AutoInt** | [CIKM 2019](https://arxiv.org/abs/1810.11921) | 自动特征交互学习 |
+ | **FiBiNET** | [RecSys 2019](https://arxiv.org/abs/1905.09433) | 特征重要性 + 双线性交互 |
+ | **DeepFFM** | [RecSys 2019](https://arxiv.org/abs/1611.00144) | 场感知因子分解机 |
+ | **EDCN** | [KDD 2021](https://arxiv.org/abs/2106.03032) | 增强型交叉网络 |

  ### 召回模型 (Matching Models) - 12个

- | 模型 | 论文 | 简介 |
- |------|------|------|
- | **DSSM** | [CIKM 2013](https://posenhuang.github.io/papers/cikm2013_DSSM_fullversion.pdf) | 经典双塔召回模型 |
- | **YoutubeDNN** | [RecSys 2016](https://dl.acm.org/doi/10.1145/2959100.2959190) | YouTube 深度召回 |
- | **YoutubeSBC** | [RecSys 2019](https://dl.acm.org/doi/10.1145/3298689.3346997) | 采样偏差校正版本 |
- | **MIND** | [CIKM 2019](https://arxiv.org/abs/1904.08030) | 多兴趣动态路由 |
- | **SINE** | [WSDM 2021](https://arxiv.org/abs/2103.06920) | 稀疏兴趣网络 |
- | **GRU4Rec** | [ICLR 2016](https://arxiv.org/abs/1511.06939) | GRU 序列推荐 |
- | **SASRec** | [ICDM 2018](https://arxiv.org/abs/1808.09781) | 自注意力序列推荐 |
- | **NARM** | [CIKM 2017](https://arxiv.org/abs/1711.04725) | 神经注意力会话推荐 |
- | **STAMP** | [KDD 2018](https://dl.acm.org/doi/10.1145/3219819.3219895) | 短期注意力记忆优先 |
- | **ComiRec** | [KDD 2020](https://arxiv.org/abs/2005.09347) | 可控多兴趣推荐 |
+ | 模型 | 论文 | 简介 |
+ | -------------- | ------------------------------------------------------------------------------ | ------------------ |
+ | **DSSM** | [CIKM 2013](https://posenhuang.github.io/papers/cikm2013_DSSM_fullversion.pdf) | 经典双塔召回模型 |
+ | **YoutubeDNN** | [RecSys 2016](https://dl.acm.org/doi/10.1145/2959100.2959190) | YouTube 深度召回 |
+ | **YoutubeSBC** | [RecSys 2019](https://dl.acm.org/doi/10.1145/3298689.3346997) | 采样偏差校正版本 |
+ | **MIND** | [CIKM 2019](https://arxiv.org/abs/1904.08030) | 多兴趣动态路由 |
+ | **SINE** | [WSDM 2021](https://arxiv.org/abs/2103.06920) | 稀疏兴趣网络 |
+ | **GRU4Rec** | [ICLR 2016](https://arxiv.org/abs/1511.06939) | GRU 序列推荐 |
+ | **SASRec** | [ICDM 2018](https://arxiv.org/abs/1808.09781) | 自注意力序列推荐 |
+ | **NARM** | [CIKM 2017](https://arxiv.org/abs/1711.04725) | 神经注意力会话推荐 |
+ | **STAMP** | [KDD 2018](https://dl.acm.org/doi/10.1145/3219819.3219895) | 短期注意力记忆优先 |
+ | **ComiRec** | [KDD 2020](https://arxiv.org/abs/2005.09347) | 可控多兴趣推荐 |

  ### 多任务模型 (Multi-Task Models) - 5个

- | 模型 | 论文 | 简介 |
- |------|------|------|
- | **ESMM** | [SIGIR 2018](https://arxiv.org/abs/1804.07931) | 全空间多任务建模 |
- | **MMoE** | [KDD 2018](https://dl.acm.org/doi/10.1145/3219819.3220007) | 多门控专家混合 |
- | **PLE** | [RecSys 2020](https://dl.acm.org/doi/10.1145/3383313.3412236) | 渐进式分层提取 |
- | **AITM** | [KDD 2021](https://arxiv.org/abs/2105.08489) | 自适应信息迁移 |
- | **SharedBottom** | - | 经典多任务共享底层 |
+ | 模型 | 论文 | 简介 |
+ | ---------------- | ------------------------------------------------------------- | ------------------ |
+ | **ESMM** | [SIGIR 2018](https://arxiv.org/abs/1804.07931) | 全空间多任务建模 |
+ | **MMoE** | [KDD 2018](https://dl.acm.org/doi/10.1145/3219819.3220007) | 多门控专家混合 |
+ | **PLE** | [RecSys 2020](https://dl.acm.org/doi/10.1145/3383313.3412236) | 渐进式分层提取 |
+ | **AITM** | [KDD 2021](https://arxiv.org/abs/2105.08489) | 自适应信息迁移 |
+ | **SharedBottom** | - | 经典多任务共享底层 |

  ### 生成式推荐 (Generative Recommendation) - 2个

- | 模型 | 论文 | 简介 |
- |------|------|------|
+ | 模型 | 论文 | 简介 |
+ | -------- | --------------------------------------------- | -------------------------------------------- |
  | **HSTU** | [Meta 2024](https://arxiv.org/abs/2402.17152) | 层级序列转换单元,支撑 Meta 万亿参数推荐系统 |
- | **HLLM** | [2024](https://arxiv.org/abs/2409.12740) | 层级大语言模型推荐,融合 LLM 语义理解能力 |
+ | **HLLM** | [2024](https://arxiv.org/abs/2409.12740) | 层级大语言模型推荐,融合 LLM 语义理解能力 |

  ## 📊 支持的数据集

@@ -293,11 +293,19 @@ model = DSSM(user_features, item_features, temperature=0.02,
  match_trainer = MatchTrainer(model)
  match_trainer.fit(train_dl)
  match_trainer.export_onnx("dssm.onnx")
- # 双塔模型可分别导出用户塔和物品塔:
+ # 双塔模型可分别导出用户塔和物品塔:
  # match_trainer.export_onnx("user_tower.onnx", mode="user")
  # match_trainer.export_onnx("dssm_item.onnx", tower="item")
  ```

+ ### 模型可视化
+
+ ```python
+ # 可视化模型架构(需要安装: pip install torch-rechub[visualization])
+ graph = ctr_trainer.visualization(depth=4) # 生成计算图
+ ctr_trainer.visualization(save_path="model.pdf", dpi=300) # 保存为高清 PDF
+ ```
+
  ## 👨‍💻‍ 贡献者

  感谢所有的贡献者!
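The README quick-start above ends at `match_trainer.export_onnx("dssm.onnx")`. For context, a hedged sketch of consuming such an exported file with `onnxruntime` (already covered by the package's `onnx` extra); the file name and zero-filled inputs are assumptions for illustration, and the real input names and dtypes are discovered at runtime since they depend on the exported feature columns:

```python
# Illustrative only: run an ONNX model produced by export_onnx() with onnxruntime.
# "dssm.onnx" and the dummy feed below are assumptions for the sketch;
# inspect sess.get_inputs() to see the actual feature inputs of your export.
import numpy as np
import onnxruntime as ort

sess = ort.InferenceSession("dssm.onnx", providers=["CPUExecutionProvider"])

feed = {}
for inp in sess.get_inputs():
    # Dynamic dimensions (e.g. batch) appear as None/str; use 1 for the sketch.
    shape = [d if isinstance(d, int) else 1 for d in inp.shape]
    dtype = np.float32 if "float" in inp.type else np.int64
    feed[inp.name] = np.zeros(shape, dtype=dtype)

outputs = sess.run(None, feed)
print({o.name: r.shape for o, r in zip(sess.get_outputs(), outputs)})
```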
--- torch_rechub-0.0.4/pyproject.toml
+++ torch_rechub-0.0.5/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

  [project]
  name = "torch-rechub"
- version = "0.0.4"
+ version = "0.0.5"
  description = "A Pytorch Toolbox for Recommendation Models, Easy-to-use and Easy-to-extend."
  readme = "README.md"
  license = { text = "MIT" }
@@ -40,6 +40,10 @@ onnx = [
  "onnx>=1.12.0",
  "onnxruntime>=1.12.0",
  ]
+ visualization = [
+ "torchview>=0.2.6",
+ "graphviz>=0.20",
+ ]
  dev = [
  "pytest>=6.0",
  "pytest-cov>=2.0",
--- torch_rechub-0.0.4/torch_rechub/trainers/ctr_trainer.py
+++ torch_rechub-0.0.5/torch_rechub/trainers/ctr_trainer.py
@@ -189,3 +189,100 @@ class CTRTrainer(object):

         exporter = ONNXExporter(model, device=export_device)
         return exporter.export(output_path=output_path, dummy_input=dummy_input, batch_size=batch_size, seq_length=seq_length, opset_version=opset_version, dynamic_batch=dynamic_batch, verbose=verbose)
+
+    def visualization(self, input_data=None, batch_size=2, seq_length=10, depth=3, show_shapes=True, expand_nested=True, save_path=None, graph_name="model", device=None, dpi=300, **kwargs):
+        """Visualize the model's computation graph.
+
+        This method generates a visual representation of the model architecture,
+        showing layer connections, tensor shapes, and nested module structures.
+        It automatically extracts feature information from the model.
+
+        Parameters
+        ----------
+        input_data : dict, optional
+            Example input dict {feature_name: tensor}.
+            If not provided, dummy inputs will be generated automatically.
+        batch_size : int, default=2
+            Batch size for auto-generated dummy input.
+        seq_length : int, default=10
+            Sequence length for SequenceFeature.
+        depth : int, default=3
+            Visualization depth, higher values show more detail.
+            Set to -1 to show all layers.
+        show_shapes : bool, default=True
+            Whether to display tensor shapes.
+        expand_nested : bool, default=True
+            Whether to expand nested modules.
+        save_path : str, optional
+            Path to save the graph image (.pdf, .svg, .png).
+            If None, displays in Jupyter or opens system viewer.
+        graph_name : str, default="model"
+            Name for the graph.
+        device : str, optional
+            Device for model execution. If None, defaults to 'cpu'.
+        dpi : int, default=300
+            Resolution in dots per inch for output image.
+            Higher values produce sharper images suitable for papers.
+        **kwargs : dict
+            Additional arguments passed to torchview.draw_graph().
+
+        Returns
+        -------
+        ComputationGraph
+            A torchview ComputationGraph object.
+
+        Raises
+        ------
+        ImportError
+            If torchview or graphviz is not installed.
+
+        Notes
+        -----
+        Default Display Behavior:
+        When `save_path` is None (default):
+        - In Jupyter/IPython: automatically displays the graph inline
+        - In Python script: opens the graph with system default viewer
+
+        Examples
+        --------
+        >>> trainer = CTRTrainer(model, ...)
+        >>> trainer.fit(train_dl, val_dl)
+        >>>
+        >>> # Auto-display in Jupyter (no save_path needed)
+        >>> trainer.visualization(depth=4)
+        >>>
+        >>> # Save to high-DPI PNG for papers
+        >>> trainer.visualization(save_path="model.png", dpi=300)
+        """
+        from ..utils.visualization import TORCHVIEW_AVAILABLE, visualize_model
+
+        if not TORCHVIEW_AVAILABLE:
+            raise ImportError(
+                "Visualization requires torchview. "
+                "Install with: pip install torch-rechub[visualization]\n"
+                "Also ensure graphviz is installed on your system:\n"
+                "  - Ubuntu/Debian: sudo apt-get install graphviz\n"
+                "  - macOS: brew install graphviz\n"
+                "  - Windows: choco install graphviz"
+            )
+
+        # Handle DataParallel wrapped model
+        model = self.model.module if hasattr(self.model, 'module') else self.model
+
+        # Use provided device or default to 'cpu'
+        viz_device = device if device is not None else 'cpu'
+
+        return visualize_model(
+            model,
+            input_data=input_data,
+            batch_size=batch_size,
+            seq_length=seq_length,
+            depth=depth,
+            show_shapes=show_shapes,
+            expand_nested=expand_nested,
+            save_path=save_path,
+            graph_name=graph_name,
+            device=viz_device,
+            dpi=dpi,
+            **kwargs
+        )
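Taken together with the README snippet, a minimal end-to-end sketch of the new trainer method. The feature, model, and trainer construction below is illustrative boilerplate in the style of the project's usual DeepFM examples (names, dims, and constructor arguments are assumptions, not part of this diff); only the `visualization()` calls come from the docstring added above, and they require the `visualization` extra plus a system Graphviz install:

```python
# Hypothetical end-to-end sketch; requires: pip install "torch-rechub[visualization]".
# Feature/model/trainer setup is assumed boilerplate -- adjust to your own data.
from torch_rechub.basic.features import DenseFeature, SparseFeature
from torch_rechub.models.ranking import DeepFM
from torch_rechub.trainers import CTRTrainer

dense_feas = [DenseFeature("price")]
sparse_feas = [SparseFeature("user_id", vocab_size=1000, embed_dim=16),
               SparseFeature("item_id", vocab_size=5000, embed_dim=16)]
model = DeepFM(deep_features=dense_feas + sparse_feas, fm_features=sparse_feas,
               mlp_params={"dims": [64, 32], "dropout": 0.2, "activation": "relu"})
trainer = CTRTrainer(model)

# New in 0.0.5: render the computation graph (inline in Jupyter,
# otherwise opened in the system viewer when save_path is None).
graph = trainer.visualization(depth=4, show_shapes=True)

# Or write a high-resolution figure for a report.
trainer.visualization(save_path="deepfm.pdf", dpi=300)
```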