replay-rec 0.19.0__tar.gz → 0.20.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (143)
  1. replay_rec-0.20.0/NOTICE +41 -0
  2. {replay_rec-0.19.0 → replay_rec-0.20.0}/PKG-INFO +56 -32
  3. {replay_rec-0.19.0 → replay_rec-0.20.0}/README.md +26 -1
  4. {replay_rec-0.19.0 → replay_rec-0.20.0}/pyproject.toml +63 -58
  5. replay_rec-0.20.0/replay/__init__.py +7 -0
  6. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/dataset.py +9 -9
  7. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/nn/sequence_tokenizer.py +44 -38
  8. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/nn/sequential_dataset.py +13 -8
  9. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/nn/torch_sequential_dataset.py +14 -13
  10. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/nn/utils.py +1 -1
  11. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/base_metric.py +1 -1
  12. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/coverage.py +7 -11
  13. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/experiment.py +3 -3
  14. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/offline_metrics.py +2 -2
  15. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/__init__.py +19 -0
  16. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/association_rules.py +1 -4
  17. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/base_neighbour_rec.py +6 -9
  18. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/base_rec.py +44 -293
  19. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/cat_pop_rec.py +2 -1
  20. replay_rec-0.20.0/replay/models/common.py +69 -0
  21. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/ann_mixin.py +30 -25
  22. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_builders/driver_hnswlib_index_builder.py +1 -1
  23. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/utils.py +4 -3
  24. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/knn.py +18 -17
  25. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/bert4rec/dataset.py +1 -1
  26. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/callbacks/prediction_callbacks.py +2 -2
  27. replay_rec-0.20.0/replay/models/nn/sequential/compiled/__init__.py +15 -0
  28. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/compiled/base_compiled_model.py +3 -1
  29. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/compiled/bert4rec_compiled.py +11 -2
  30. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/compiled/sasrec_compiled.py +5 -1
  31. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/sasrec/dataset.py +1 -1
  32. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/sasrec/model.py +1 -1
  33. replay_rec-0.20.0/replay/models/optimization/__init__.py +14 -0
  34. replay_rec-0.20.0/replay/models/optimization/optuna_mixin.py +279 -0
  35. {replay_rec-0.19.0/replay → replay_rec-0.20.0/replay/models}/optimization/optuna_objective.py +13 -15
  36. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/slim.py +2 -4
  37. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/word2vec.py +7 -12
  38. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/preprocessing/discretizer.py +1 -2
  39. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/preprocessing/history_based_fp.py +1 -1
  40. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/preprocessing/label_encoder.py +1 -1
  41. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/cold_user_random_splitter.py +13 -7
  42. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/last_n_splitter.py +17 -10
  43. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/utils/__init__.py +6 -2
  44. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/utils/common.py +4 -2
  45. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/utils/model_handler.py +11 -31
  46. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/utils/session_handler.py +2 -2
  47. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/utils/spark_utils.py +2 -2
  48. replay_rec-0.20.0/replay/utils/types.py +48 -0
  49. replay_rec-0.20.0/replay/utils/warnings.py +26 -0
  50. replay_rec-0.19.0/replay/__init__.py +0 -3
  51. replay_rec-0.19.0/replay/models/nn/sequential/compiled/__init__.py +0 -5
  52. replay_rec-0.19.0/replay/optimization/__init__.py +0 -5
  53. replay_rec-0.19.0/replay/utils/types.py +0 -38
  54. {replay_rec-0.19.0 → replay_rec-0.20.0}/LICENSE +0 -0
  55. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/__init__.py +0 -0
  56. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/dataset_utils/__init__.py +0 -0
  57. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/dataset_utils/dataset_label_encoder.py +0 -0
  58. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/nn/__init__.py +6 -6
  59. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/nn/schema.py +0 -0
  60. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/schema.py +0 -0
  61. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/spark_schema.py +0 -0
  62. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/__init__.py +0 -0
  63. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/categorical_diversity.py +0 -0
  64. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/descriptors.py +0 -0
  65. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/hitrate.py +0 -0
  66. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/map.py +0 -0
  67. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/mrr.py +0 -0
  68. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/ndcg.py +0 -0
  69. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/novelty.py +0 -0
  70. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/precision.py +0 -0
  71. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/recall.py +0 -0
  72. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/rocauc.py +0 -0
  73. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/surprisal.py +0 -0
  74. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/torch_metrics_builder.py +0 -0
  75. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/metrics/unexpectedness.py +0 -0
  76. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/als.py +0 -0
  77. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/cluster.py +0 -0
  78. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/__init__.py +0 -0
  79. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/__init__.py +0 -0
  80. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/entities/__init__.py +0 -0
  81. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/entities/base_hnsw_param.py +0 -0
  82. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/entities/hnswlib_param.py +0 -0
  83. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/entities/nmslib_hnsw_param.py +0 -0
  84. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_builders/__init__.py +0 -0
  85. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_builders/base_index_builder.py +0 -0
  86. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_builders/driver_nmslib_index_builder.py +0 -0
  87. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_builders/executor_hnswlib_index_builder.py +0 -0
  88. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_builders/executor_nmslib_index_builder.py +0 -0
  89. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_builders/nmslib_index_builder_mixin.py +0 -0
  90. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_inferers/__init__.py +0 -0
  91. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_inferers/base_inferer.py +0 -0
  92. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_inferers/hnswlib_filter_index_inferer.py +0 -0
  93. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_inferers/hnswlib_index_inferer.py +0 -0
  94. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_inferers/nmslib_filter_index_inferer.py +0 -0
  95. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_inferers/nmslib_index_inferer.py +0 -0
  96. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_inferers/utils.py +0 -0
  97. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_stores/__init__.py +0 -0
  98. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_stores/base_index_store.py +0 -0
  99. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_stores/hdfs_index_store.py +0 -0
  100. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_stores/shared_disk_index_store.py +0 -0
  101. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_stores/spark_files_index_store.py +0 -0
  102. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/extensions/ann/index_stores/utils.py +0 -0
  103. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/kl_ucb.py +0 -0
  104. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/lin_ucb.py +0 -0
  105. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/__init__.py +0 -0
  106. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/loss/__init__.py +0 -0
  107. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/loss/sce.py +0 -0
  108. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/optimizer_utils/__init__.py +0 -0
  109. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/optimizer_utils/optimizer_factory.py +0 -0
  110. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/__init__.py +0 -0
  111. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/bert4rec/__init__.py +0 -0
  112. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/bert4rec/lightning.py +0 -0
  113. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/bert4rec/model.py +0 -0
  114. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/callbacks/__init__.py +0 -0
  115. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/callbacks/validation_callback.py +0 -0
  116. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/postprocessors/__init__.py +0 -0
  117. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/postprocessors/_base.py +0 -0
  118. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/postprocessors/postprocessors.py +0 -0
  119. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/sasrec/__init__.py +0 -0
  120. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/nn/sequential/sasrec/lightning.py +0 -0
  121. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/pop_rec.py +0 -0
  122. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/query_pop_rec.py +0 -0
  123. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/random_rec.py +0 -0
  124. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/thompson_sampling.py +0 -0
  125. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/ucb.py +0 -0
  126. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/models/wilson.py +0 -0
  127. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/preprocessing/__init__.py +0 -0
  128. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/preprocessing/converter.py +0 -0
  129. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/preprocessing/filters.py +0 -0
  130. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/preprocessing/sessionizer.py +0 -0
  131. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/scenarios/__init__.py +0 -0
  132. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/scenarios/fallback.py +0 -0
  133. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/__init__.py +0 -0
  134. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/base_splitter.py +0 -0
  135. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/k_folds.py +0 -0
  136. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/new_users_splitter.py +0 -0
  137. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/random_splitter.py +0 -0
  138. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/ratio_splitter.py +0 -0
  139. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/time_splitter.py +0 -0
  140. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/splitters/two_stage_splitter.py +0 -0
  141. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/utils/dataframe_bucketizer.py +0 -0
  142. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/utils/distributions.py +0 -0
  143. {replay_rec-0.19.0 → replay_rec-0.20.0}/replay/utils/time.py +0 -0
replay_rec-0.20.0/NOTICE
@@ -0,0 +1,41 @@
+ Apache Spark
+ Copyright 2014 and onwards The Apache Software Foundation.
+
+ This product includes software developed at
+ The Apache Software Foundation (http://www.apache.org/).
+
+
+ Export Control Notice
+ ---------------------
+
+ This distribution includes cryptographic software. The country in which you currently reside may have
+ restrictions on the import, possession, use, and/or re-export to another country, of encryption software.
+ BEFORE using any encryption software, please check your country's laws, regulations and policies concerning
+ the import, possession, or use, and re-export of encryption software, to see if this is permitted. See
+ <http://www.wassenaar.org/> for more information.
+
+ The U.S. Government Department of Commerce, Bureau of Industry and Security (BIS), has classified this
+ software as Export Commodity Control Number (ECCN) 5D002.C.1, which includes information security software
+ using or performing cryptographic functions with asymmetric algorithms. The form and manner of this Apache
+ Software Foundation distribution makes it eligible for export under the License Exception ENC Technology
+ Software Unrestricted (TSU) exception (see the BIS Export Administration Regulations, Section 740.13) for
+ both object code and source code.
+
+ The following provides more details on the included cryptographic software:
+
+ This software uses Apache Commons Crypto (https://commons.apache.org/proper/commons-crypto/) to
+ support authentication, and encryption and decryption of data sent across the network between
+ services.
+
+
+ Metrics
+ Copyright 2010-2013 Coda Hale and Yammer, Inc.
+
+ This product includes software developed by Coda Hale and Yammer, Inc.
+
+ This product includes code derived from the JSR-166 project (ThreadLocalRandom, Striped64,
+ LongAdder), which was released with the following comments:
+
+ Written by Doug Lea with assistance from members of JCP JSR-166
+ Expert Group and released to the public domain, as explained at
+ http://creativecommons.org/publicdomain/zero/1.0/
{replay_rec-0.19.0 → replay_rec-0.20.0}/PKG-INFO
@@ -1,45 +1,44 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.4
  Name: replay-rec
- Version: 0.19.0
+ Version: 0.20.0
  Summary: RecSys Library
- Home-page: https://sb-ai-lab.github.io/RePlay/
- License: Apache-2.0
+ License-Expression: Apache-2.0
+ License-File: LICENSE
+ License-File: NOTICE
  Author: AI Lab
- Requires-Python: >=3.8.1,<3.12
+ Requires-Python: >=3.9, <3.13
+ Classifier: Operating System :: Unix
  Classifier: Development Status :: 4 - Beta
  Classifier: Environment :: Console
  Classifier: Intended Audience :: Developers
  Classifier: Intended Audience :: Science/Research
- Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Natural Language :: English
- Classifier: Operating System :: Unix
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
- Provides-Extra: all
  Provides-Extra: spark
  Provides-Extra: torch
- Provides-Extra: torch-openvino
- Requires-Dist: fixed-install-nmslib (==2.1.2)
- Requires-Dist: hnswlib (>=0.7.0,<0.8.0)
- Requires-Dist: lightning (>=2.0.2,<=2.4.0) ; extra == "torch" or extra == "torch-openvino" or extra == "all"
- Requires-Dist: numpy (>=1.20.0)
- Requires-Dist: onnx (>=1.16.2,<1.17.0) ; extra == "torch-openvino" or extra == "all"
- Requires-Dist: openvino (>=2024.3.0,<2024.4.0) ; extra == "torch-openvino" or extra == "all"
- Requires-Dist: optuna (>=3.2.0,<3.3.0)
- Requires-Dist: pandas (>=1.3.5,<=2.2.2)
- Requires-Dist: polars (>=1.0.0,<1.1.0)
- Requires-Dist: psutil (>=6.0.0,<6.1.0)
- Requires-Dist: pyarrow (>=12.0.1)
- Requires-Dist: pyspark (>=3.0,<3.6) ; (python_full_version >= "3.8.1" and python_version < "3.11") and (extra == "spark" or extra == "all")
- Requires-Dist: pyspark (>=3.4,<3.6) ; (python_version >= "3.11" and python_version < "3.12") and (extra == "spark" or extra == "all")
- Requires-Dist: pytorch-ranger (>=0.1.1,<0.2.0) ; extra == "torch" or extra == "torch-openvino" or extra == "all"
- Requires-Dist: scikit-learn (>=1.0.2,<2.0.0)
- Requires-Dist: scipy (>=1.8.1,<2.0.0)
- Requires-Dist: torch (>=1.8,<3.0.0) ; (python_version >= "3.9") and (extra == "torch" or extra == "torch-openvino" or extra == "all")
- Requires-Dist: torch (>=1.8,<=2.4.1) ; (python_version >= "3.8" and python_version < "3.9") and (extra == "torch" or extra == "torch-openvino" or extra == "all")
+ Provides-Extra: torch-cpu
+ Requires-Dist: lightning (<2.6.0) ; extra == "torch" or extra == "torch-cpu"
+ Requires-Dist: lightning ; extra == "torch"
+ Requires-Dist: lightning ; extra == "torch-cpu"
+ Requires-Dist: numpy (>=1.20.0,<2)
+ Requires-Dist: pandas (>=1.3.5,<2.4.0)
+ Requires-Dist: polars (<2.0)
+ Requires-Dist: psutil (<=7.0.0) ; extra == "spark"
+ Requires-Dist: psutil ; extra == "spark"
+ Requires-Dist: pyarrow (<22.0)
+ Requires-Dist: pyspark (>=3.0,<3.5) ; extra == "spark"
+ Requires-Dist: pyspark ; extra == "spark"
+ Requires-Dist: pytorch-optimizer (>=3.8.0,<3.9.0) ; extra == "torch" or extra == "torch-cpu"
+ Requires-Dist: pytorch-optimizer ; extra == "torch"
+ Requires-Dist: pytorch-optimizer ; extra == "torch-cpu"
+ Requires-Dist: scikit-learn (>=1.6.1,<1.7.0)
+ Requires-Dist: scipy (>=1.13.1,<1.14)
+ Requires-Dist: setuptools
+ Requires-Dist: torch (>=1.8,<3.0.0) ; extra == "torch" or extra == "torch-cpu"
+ Requires-Dist: torch ; extra == "torch"
+ Requires-Dist: torch ; extra == "torch-cpu"
+ Requires-Dist: tqdm (>=4.67,<5)
+ Project-URL: Homepage, https://sb-ai-lab.github.io/RePlay/
  Project-URL: Repository, https://github.com/sb-ai-lab/RePlay
  Description-Content-Type: text/markdown

@@ -208,7 +207,6 @@ pip install replay-rec==XX.YY.ZZrc0
  In addition to the core package, several extras are also provided, including:
  - `[spark]`: Install PySpark functionality
  - `[torch]`: Install PyTorch and Lightning functionality
- - `[all]`: `[spark]` `[torch]`

  Example:
  ```bash
@@ -219,9 +217,35 @@ pip install replay-rec[spark]
  pip install replay-rec[spark]==XX.YY.ZZrc0
  ```

+ Additionally, `replay-rec[torch]` may be installed with CPU-only version of `torch` by providing its respective index URL during installation:
+ ```bash
+ # Install package with the CPU version of torch
+ pip install replay-rec[torch] --extra-index-url https://download.pytorch.org/whl/cpu
+ ```
+
+
  To build RePlay from sources please use the [instruction](CONTRIBUTING.md#installing-from-the-source).


+ ### Optional features
+ RePlay includes a set of optional features which require users to install optional dependencies manually. These features include:
+
+ 1) Hyperpearameter search via Optuna:
+ ```bash
+ pip install optuna
+ ```
+
+ 2) Model compilation via OpenVINO:
+ ```bash
+ pip install openvino onnx
+ ```
+
+ 3) Vector database and hierarchical search support:
+ ```bash
+ pip install hnswlib fixed-install-nmslib
+ ```
+
+
  <a name="examples"></a>
  ## 📑 Resources

{replay_rec-0.19.0 → replay_rec-0.20.0}/README.md
@@ -163,7 +163,6 @@ pip install replay-rec==XX.YY.ZZrc0
  In addition to the core package, several extras are also provided, including:
  - `[spark]`: Install PySpark functionality
  - `[torch]`: Install PyTorch and Lightning functionality
- - `[all]`: `[spark]` `[torch]`

  Example:
  ```bash
@@ -174,9 +173,35 @@ pip install replay-rec[spark]
  pip install replay-rec[spark]==XX.YY.ZZrc0
  ```

+ Additionally, `replay-rec[torch]` may be installed with CPU-only version of `torch` by providing its respective index URL during installation:
+ ```bash
+ # Install package with the CPU version of torch
+ pip install replay-rec[torch] --extra-index-url https://download.pytorch.org/whl/cpu
+ ```
+
+
  To build RePlay from sources please use the [instruction](CONTRIBUTING.md#installing-from-the-source).


+ ### Optional features
+ RePlay includes a set of optional features which require users to install optional dependencies manually. These features include:
+
+ 1) Hyperpearameter search via Optuna:
+ ```bash
+ pip install optuna
+ ```
+
+ 2) Model compilation via OpenVINO:
+ ```bash
+ pip install openvino onnx
+ ```
+
+ 3) Vector database and hierarchical search support:
+ ```bash
+ pip install hnswlib fixed-install-nmslib
+ ```
+
+
  <a name="examples"></a>
  ## 📑 Resources

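The new "Optional features" section above asks users to install Optuna, OpenVINO/ONNX, or hnswlib/nmslib by hand. A minimal sketch of how such soft dependencies are commonly guarded at import time (a generic pattern, not RePlay's actual implementation):

```python
# Hedged sketch of an optional-dependency guard (generic pattern, not RePlay's code).
try:
    import optuna  # available only after `pip install optuna`

    OPTUNA_AVAILABLE = True
except ImportError:
    optuna = None
    OPTUNA_AVAILABLE = False


def require_optuna() -> None:
    """Fail with an actionable message when an Optuna-backed feature is used without it."""
    if not OPTUNA_AVAILABLE:
        msg = "Hyperparameter search needs Optuna. Install it with `pip install optuna`."
        raise ImportError(msg)
```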
{replay_rec-0.19.0 → replay_rec-0.20.0}/pyproject.toml
@@ -1,35 +1,28 @@
  [build-system]
  requires = [
- "poetry-core>=1.0.0",
+ "poetry-core>=2.0.0",
  "poetry-dynamic-versioning>=1.0.0,<2.0.0",
+ "setuptools",
  ]
  build-backend = "poetry_dynamic_versioning.backend"

- [tool.black]
- line-length = 120
- target-versions = ["py38", "py39", "py310", "py311"]
-
- [tool.poetry]
+ [project]
  name = "replay-rec"
- packages = [{include = "replay"}]
  license = "Apache-2.0"
  description = "RecSys Library"
  authors = [
- "AI Lab",
- "Alexey Vasilev",
- "Anna Volodkevich",
- "Alexey Grishanov",
- "Yan-Martin Tamm",
- "Boris Shminke",
- "Alexander Sidorenko",
- "Roza Aysina",
+ {name = "AI Lab"},
+ {name = "Alexey Vasilev"},
+ {name = "Anna Volodkevich"},
+ {name = "Alexey Grishanov"},
+ {name = "Yan-Martin Tamm"},
+ {name = "Boris Shminke"},
+ {name = "Alexander Sidorenko"},
+ {name = "Roza Aysina"},
  ]
  readme = "README.md"
- homepage = "https://sb-ai-lab.github.io/RePlay/"
- repository = "https://github.com/sb-ai-lab/RePlay"
  classifiers = [
  "Operating System :: Unix",
- "Intended Audience :: Science/Research",
  "Development Status :: 4 - Beta",
  "Environment :: Console",
  "Intended Audience :: Developers",
@@ -37,44 +30,53 @@ classifiers = [
  "Natural Language :: English",
  "Topic :: Scientific/Engineering :: Artificial Intelligence",
  ]
+ requires-python = ">=3.9, <3.13"
+ dependencies = [
+ "setuptools",
+ "numpy (>=1.20.0,<2)",
+ "pandas (>=1.3.5,<2.4.0)",
+ "polars (<2.0)",
+ "scipy (>=1.13.1,<1.14)",
+ "scikit-learn (>=1.6.1,<1.7.0)",
+ "pyarrow (<22.0)",
+ "tqdm (>=4.67,<5)",
+ "pyspark (>=3.0,<3.5); extra == 'spark'",
+ "psutil (<=7.0.0); extra == 'spark'",
+ "torch (>=1.8, <3.0.0); extra == 'torch' or extra == 'torch-cpu'",
+ "pytorch-optimizer (>=3.8.0,<3.9.0); extra == 'torch' or extra == 'torch-cpu'",
+ "lightning (<2.6.0); extra == 'torch' or extra == 'torch-cpu'",
+ ]
+ dynamic = ["dependencies"]
+ version = "0.20.0"
+
+ [project.optional-dependencies]
+ spark = ["pyspark", "psutil"]
+ torch = ["torch", "pytorch-optimizer", "lightning"]
+ torch-cpu = ["torch", "pytorch-optimizer", "lightning"]
+
+ [project.urls]
+ homepage = "https://sb-ai-lab.github.io/RePlay/"
+ repository = "https://github.com/sb-ai-lab/RePlay"
+
+ [tool.black]
+ line-length = 120
+ target-version = ["py39", "py310", "py311", "py312"]
+
+ [tool.poetry]
+ packages = [{include = "replay"}]
  exclude = [
  "replay/conftest.py",
  "replay/experimental",
  ]
- version = "0.19.0"

  [tool.poetry.dependencies]
- python = ">=3.8.1, <3.12"
- numpy = ">=1.20.0"
- pandas = ">=1.3.5, <=2.2.2"
- polars = "~1.0.0"
- optuna = "~3.2.0"
- scipy = "^1.8.1"
- psutil = "~6.0.0"
- scikit-learn = "^1.0.2"
- pyarrow = ">=12.0.1"
- openvino = {version = "~2024.3.0", optional = true}
- onnx = {version = "~1.16.2", optional = true}
- pyspark = [
- {version = ">=3.4,<3.6", python = ">=3.11,<3.12", optional = true},
- {version = ">=3.0,<3.6", python = ">=3.8.1,<3.11", optional = true},
- ]
  torch = [
- {version = ">=1.8, <3.0.0", python = ">=3.9", optional = true},
- {version = ">=1.8, <=2.4.1", python = ">=3.8,<3.9", optional = true},
+ {markers = "extra == 'torch-cpu' and extra !='torch'", source = "torch-cpu-mirror"},
+ {markers = "extra == 'torch' and extra !='torch-cpu'", source = "PyPI"},
  ]
- lightning = {version = ">=2.0.2, <=2.4.0", optional = true}
- pytorch-ranger = {version = "^0.1.1", optional = true}
- fixed-install-nmslib = "2.1.2"
- hnswlib = "^0.7.0"
-
- [tool.poetry.extras]
- spark = ["pyspark"]
- torch = ["torch", "pytorch-ranger", "lightning"]
- torch-openvino = ["torch", "pytorch-ranger", "lightning", "openvino", "onnx"]
- all = ["pyspark", "torch", "pytorch-ranger", "lightning", "openvino", "onnx"]

  [tool.poetry.group.dev.dependencies]
+ coverage-conditional-plugin = "^0.9.0"
  jupyter = "~1.0.0"
  jupyterlab = "^3.6.0"
  pytest = ">=7.1.0"
@@ -93,33 +95,36 @@ docutils = "0.16"
  data-science-types = "0.2.23"
  filelock = "~3.14.0"

+ [[tool.poetry.source]]
+ name = "torch-cpu-mirror"
+ url = "https://download.pytorch.org/whl/cpu"
+ priority = "explicit"
+
  [tool.poetry-dynamic-versioning]
  enable = false
- format-jinja = """0.19.0{{ env['PACKAGE_SUFFIX'] }}"""
+ format-jinja = """0.20.0{{ env['PACKAGE_SUFFIX'] }}"""
  vcs = "git"

  [tool.ruff]
  exclude = [".git", ".venv", "__pycache__", "env", "venv", "docs", "projects", "examples"]
- extend-select = ["C90", "T10", "T20", "UP004"]
  line-length = 120
+
+ [tool.ruff.lint]
  select = ["ARG", "C4", "E", "EM", "ERA", "F", "FLY", "I", "INP", "ISC", "N", "PERF", "PGH", "PIE", "PYI", "Q", "RUF", "SIM", "TID", "W"]
+ extend-select = ["C90", "T10", "T20", "UP004"]
+ ignore = ["SIM115"]
+ mccabe = {max-complexity = 13}
+ isort = {combine-as-imports = true, force-wrap-aliases = true}

- [tool.ruff.flake8-quotes]
+ [tool.ruff.lint.flake8-quotes]
  docstring-quotes = "double"
  inline-quotes = "double"
  multiline-quotes = "double"

- [tool.ruff.flake8-unused-arguments]
+ [tool.ruff.lint.flake8-unused-arguments]
  ignore-variadic-names = false

- [tool.ruff.isort]
- combine-as-imports = true
- force-wrap-aliases = true
-
- [tool.ruff.mccabe]
- max-complexity = 13
-
- [tool.ruff.per-file-ignores]
+ [tool.ruff.lint.per-file-ignores]
  "*/" = ["PERF203", "RUF001", "RUF002", "RUF012", "E402"]
  "__init__.py" = ["F401"]
  "replay/utils/model_handler.py" = ["F403", "F405"]
replay_rec-0.20.0/replay/__init__.py
@@ -0,0 +1,7 @@
+ """RecSys library"""
+
+ # NOTE: This ensures distutils monkey-patching is performed before any
+ # functionality removed in Python 3.12 is used in downstream packages (like lightfm)
+ import setuptools as _
+
+ __version__ = "0.20.0"
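The `import setuptools as _` in the new `replay/__init__.py` guards against the removal of `distutils` from the standard library in Python 3.12: importing `setuptools` first makes its bundled `distutils` shim available to downstream packages that still rely on it. A minimal sketch of the import-order dependency (the `distutils.util` import stands in for any such downstream package):

```python
# Importing setuptools first registers its vendored distutils, so the legacy
# import below keeps working on Python 3.12+ where stdlib distutils is gone.
import setuptools  # noqa: F401

import distutils.util

print(distutils.util.get_platform())  # e.g. "linux-x86_64"
```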
{replay_rec-0.19.0 → replay_rec-0.20.0}/replay/data/dataset.py
@@ -610,9 +610,9 @@ class Dataset:
  if self.is_pandas:
  try:
  data[column] = data[column].astype(int)
- except Exception:
+ except Exception as exc:
  msg = f"IDs in {source.name}.{column} are not encoded. They are not int."
- raise ValueError(msg)
+ raise ValueError(msg) from exc

  if self.is_pandas:
  is_int = np.issubdtype(dict(data.dtypes)[column], int)
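The switch to `raise ValueError(msg) from exc` chains the original failure as `__cause__`, so the traceback keeps showing why the cast to `int` failed instead of replacing it. A self-contained illustration of the difference (generic code, not the RePlay method itself):

```python
def encode(value: str) -> int:
    try:
        return int(value)
    except Exception as exc:
        msg = "IDs are not encoded. They are not int."
        # `from exc` marks the int() failure as the explicit cause of the ValueError.
        raise ValueError(msg) from exc


try:
    encode("user_42")
except ValueError as err:
    print(repr(err.__cause__))  # the original "invalid literal for int()" error
```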
@@ -775,10 +775,10 @@ def check_dataframes_types_equal(dataframe: DataFrameLike, other: DataFrameLike)

  :returns: True if dataframes have same type.
  """
- if isinstance(dataframe, PandasDataFrame) and isinstance(other, PandasDataFrame):
- return True
- if isinstance(dataframe, SparkDataFrame) and isinstance(other, SparkDataFrame):
- return True
- if isinstance(dataframe, PolarsDataFrame) and isinstance(other, PolarsDataFrame):
- return True
- return False
+ return any(
+ [
+ isinstance(dataframe, PandasDataFrame) and isinstance(other, PandasDataFrame),
+ isinstance(dataframe, SparkDataFrame) and isinstance(other, SparkDataFrame),
+ isinstance(dataframe, PolarsDataFrame) and isinstance(other, PolarsDataFrame),
+ ]
+ )
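The rewritten `check_dataframes_types_equal` folds the three early-return branches into a single `any([...])` over the per-backend checks; behaviour is unchanged. A minimal stand-alone version of the same pattern, with built-in container types standing in for the pandas, Spark and Polars frame classes:

```python
def same_backend(a, b) -> bool:
    # True only when both objects belong to the same "backend" (here: container type).
    return any(
        [
            isinstance(a, list) and isinstance(b, list),
            isinstance(a, dict) and isinstance(b, dict),
            isinstance(a, set) and isinstance(b, set),
        ]
    )


assert same_backend([1], [2]) is True
assert same_backend([1], {"k": 2}) is False
```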