replay-rec 0.20.3__py3-none-any.whl → 0.20.3rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. replay/__init__.py +1 -1
  2. replay/experimental/__init__.py +0 -0
  3. replay/experimental/metrics/__init__.py +62 -0
  4. replay/experimental/metrics/base_metric.py +603 -0
  5. replay/experimental/metrics/coverage.py +97 -0
  6. replay/experimental/metrics/experiment.py +175 -0
  7. replay/experimental/metrics/hitrate.py +26 -0
  8. replay/experimental/metrics/map.py +30 -0
  9. replay/experimental/metrics/mrr.py +18 -0
  10. replay/experimental/metrics/ncis_precision.py +31 -0
  11. replay/experimental/metrics/ndcg.py +49 -0
  12. replay/experimental/metrics/precision.py +22 -0
  13. replay/experimental/metrics/recall.py +25 -0
  14. replay/experimental/metrics/rocauc.py +49 -0
  15. replay/experimental/metrics/surprisal.py +90 -0
  16. replay/experimental/metrics/unexpectedness.py +76 -0
  17. replay/experimental/models/__init__.py +50 -0
  18. replay/experimental/models/admm_slim.py +257 -0
  19. replay/experimental/models/base_neighbour_rec.py +200 -0
  20. replay/experimental/models/base_rec.py +1386 -0
  21. replay/experimental/models/base_torch_rec.py +234 -0
  22. replay/experimental/models/cql.py +454 -0
  23. replay/experimental/models/ddpg.py +932 -0
  24. replay/experimental/models/dt4rec/__init__.py +0 -0
  25. replay/experimental/models/dt4rec/dt4rec.py +189 -0
  26. replay/experimental/models/dt4rec/gpt1.py +401 -0
  27. replay/experimental/models/dt4rec/trainer.py +127 -0
  28. replay/experimental/models/dt4rec/utils.py +264 -0
  29. replay/experimental/models/extensions/spark_custom_models/__init__.py +0 -0
  30. replay/experimental/models/extensions/spark_custom_models/als_extension.py +792 -0
  31. replay/experimental/models/hierarchical_recommender.py +331 -0
  32. replay/experimental/models/implicit_wrap.py +131 -0
  33. replay/experimental/models/lightfm_wrap.py +303 -0
  34. replay/experimental/models/mult_vae.py +332 -0
  35. replay/experimental/models/neural_ts.py +986 -0
  36. replay/experimental/models/neuromf.py +406 -0
  37. replay/experimental/models/scala_als.py +293 -0
  38. replay/experimental/models/u_lin_ucb.py +115 -0
  39. replay/experimental/nn/data/__init__.py +1 -0
  40. replay/experimental/nn/data/schema_builder.py +102 -0
  41. replay/experimental/preprocessing/__init__.py +3 -0
  42. replay/experimental/preprocessing/data_preparator.py +839 -0
  43. replay/experimental/preprocessing/padder.py +229 -0
  44. replay/experimental/preprocessing/sequence_generator.py +208 -0
  45. replay/experimental/scenarios/__init__.py +1 -0
  46. replay/experimental/scenarios/obp_wrapper/__init__.py +8 -0
  47. replay/experimental/scenarios/obp_wrapper/obp_optuna_objective.py +74 -0
  48. replay/experimental/scenarios/obp_wrapper/replay_offline.py +261 -0
  49. replay/experimental/scenarios/obp_wrapper/utils.py +85 -0
  50. replay/experimental/scenarios/two_stages/__init__.py +0 -0
  51. replay/experimental/scenarios/two_stages/reranker.py +117 -0
  52. replay/experimental/scenarios/two_stages/two_stages_scenario.py +757 -0
  53. replay/experimental/utils/__init__.py +0 -0
  54. replay/experimental/utils/logger.py +24 -0
  55. replay/experimental/utils/model_handler.py +186 -0
  56. replay/experimental/utils/session_handler.py +44 -0
  57. {replay_rec-0.20.3.dist-info → replay_rec-0.20.3rc0.dist-info}/METADATA +11 -17
  58. {replay_rec-0.20.3.dist-info → replay_rec-0.20.3rc0.dist-info}/RECORD +61 -6
  59. {replay_rec-0.20.3.dist-info → replay_rec-0.20.3rc0.dist-info}/WHEEL +0 -0
  60. {replay_rec-0.20.3.dist-info → replay_rec-0.20.3rc0.dist-info}/licenses/LICENSE +0 -0
  61. {replay_rec-0.20.3.dist-info → replay_rec-0.20.3rc0.dist-info}/licenses/NOTICE +0 -0
File without changes
@@ -0,0 +1,24 @@
1
+ import logging
2
+
3
+
4
def get_logger(
    name,
    level=logging.INFO,
    format_str="%(asctime)s [%(pathname)s:%(lineno)s - %(levelname)s ] %(message)s",
    date_format="%Y-%m-%d %H:%M:%S",
    file=False,
):
    """
    Return a configured python logger instance.

    :param name: logger name; when ``file`` is True it is also used
        as the path of the log file
    :param level: logging level applied to both the logger and its handler
    :param format_str: log record format string
    :param date_format: timestamp format for log records
    :param file: if True, write records to a file named ``name``;
        otherwise stream them to stderr
    :return: configured ``logging.Logger``
    """
    logger = logging.getLogger(name)
    logger.setLevel(level)

    # Attach a handler only on first use so that repeated calls with the
    # same name do not duplicate log output.
    if not logger.hasHandlers():
        if file:
            handler = logging.FileHandler(name)
        else:
            handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(logging.Formatter(fmt=format_str, datefmt=date_format))
        logger.addHandler(handler)

    return logger
@@ -0,0 +1,186 @@
1
+ import json
2
+ from inspect import getfullargspec
3
+ from os.path import join
4
+ from pathlib import Path
5
+ from typing import Union
6
+
7
+ from replay.experimental.models.base_rec import BaseRecommender
8
+ from replay.experimental.preprocessing import Indexer
9
+ from replay.utils import PYSPARK_AVAILABLE
10
+ from replay.utils.session_handler import State
11
+ from replay.utils.spark_utils import load_pickled_from_parquet, save_picklable_to_parquet
12
+
13
+ if PYSPARK_AVAILABLE:
14
+ import pyspark.sql.types as st
15
+ from pyspark.ml.feature import IndexToString, StringIndexerModel
16
+ from pyspark.sql import SparkSession
17
+
18
+ from replay.utils.model_handler import get_fs
19
+
20
def get_list_of_paths(spark: SparkSession, dir_path: str):
    """
    Returns list of paths to files in the `dir_path`

    :param spark: spark session
    :param dir_path: path to dir in hdfs or local disk
    :return: list of paths to files
    """
    # Go through the JVM Hadoop FileSystem API so that HDFS and local
    # paths are handled uniformly.
    hadoop_dir = spark._jvm.org.apache.hadoop.fs.Path(dir_path)
    file_system = get_fs(spark)
    return [str(status.getPath()) for status in file_system.listStatus(hadoop_dir)]
31
+
32
+
33
def save(model: BaseRecommender, path: Union[str, Path], overwrite: bool = False):
    """
    Save fitted model to disk as a folder

    :param model: Trained recommender
    :param path: destination where model files will be stored
    :param overwrite: if False, raise ``FileExistsError`` when ``path``
        already exists; if True, existing contents are overwritten
    :raises FileExistsError: if ``path`` exists and ``overwrite`` is False
    :return:
    """
    if isinstance(path, Path):
        path = str(path)

    spark = State().session

    fs = get_fs(spark)
    if not overwrite:
        is_exists = fs.exists(spark._jvm.org.apache.hadoop.fs.Path(path))
        if is_exists:
            msg = f"Path '{path}' already exists. Mode is 'overwrite = False'."
            raise FileExistsError(msg)

    fs.mkdirs(spark._jvm.org.apache.hadoop.fs.Path(path))
    # Model-specific artifacts are delegated to the model itself.
    model._save_model(join(path, "model"))

    # Constructor arguments are stored as a one-row JSON dataset so the
    # model can be re-instantiated by `load`; the class name travels along
    # under the reserved "_model_name" key.
    init_args = model._init_args
    init_args["_model_name"] = str(model)
    sc = spark.sparkContext
    df = spark.read.json(sc.parallelize([json.dumps(init_args)]))
    df.coalesce(1).write.mode("overwrite").option("ignoreNullFields", "false").json(join(path, "init_args.json"))

    # Persist every fitted dataframe the model keeps (None entries are
    # simply absent on reload).
    dataframes = model._dataframes
    df_path = join(path, "dataframes")
    for name, df in dataframes.items():
        if df is not None:
            df.write.mode("overwrite").parquet(join(df_path, name))

    if hasattr(model, "fit_users"):
        model.fit_users.write.mode("overwrite").parquet(join(df_path, "fit_users"))
    if hasattr(model, "fit_items"):
        model.fit_items.write.mode("overwrite").parquet(join(df_path, "fit_items"))
    if hasattr(model, "study"):
        # Optuna study (present if hyperparameter search was run) is pickled.
        save_picklable_to_parquet(model.study, join(path, "study"))
74
+
75
+
76
def load(path: str, model_type=None) -> BaseRecommender:
    """
    Load saved model from disk

    :param path: path to model folder
    :param model_type: optional model class to instantiate; when None the
        class is resolved from the ``_model_name`` stored by ``save``
    :return: Restored trained model
    """
    spark = State().session
    args = spark.read.json(join(path, "init_args.json")).first().asDict(recursive=True)
    name = args["_model_name"]
    del args["_model_name"]

    model_class = model_type if model_type is not None else globals()[name]
    if name == "CQL":
        # Spark's JSON round-trip drops empty dicts, so "none"-typed
        # sub-configs come back without their "params" key — restore it.
        for a in args:
            if isinstance(args[a], dict) and "type" in args[a] and args[a]["type"] == "none":
                args[a]["params"] = {}
    init_args = getfullargspec(model_class.__init__).args
    init_args.remove("self")
    extra_args = set(args) - set(init_args)
    if len(extra_args) > 0:
        # Split saved arguments into constructor kwargs and plain attributes.
        # Fix: only the *extra* keys go into `extra_args` (the original
        # copied every saved key), and constructor params absent from the
        # saved args are skipped instead of raising KeyError.
        extra_args = {key: args[key] for key in extra_args}
        init_args = {key: args[key] for key in init_args if key in args}
    else:
        init_args = args
        extra_args = {}

    model = model_class(**init_args)
    for arg, value in extra_args.items():
        # Fix: the original did `model.arg = ...`, which always assigned an
        # attribute literally named "arg" instead of the saved attribute.
        setattr(model, arg, value)

    # Each saved dataframe is restored under the attribute matching its
    # directory name (e.g. "fit_users").
    dataframes_paths = get_list_of_paths(spark, join(path, "dataframes"))
    for dataframe_path in dataframes_paths:
        df = spark.read.parquet(dataframe_path)
        attr_name = dataframe_path.split("/")[-1]
        setattr(model, attr_name, df)

    model._load_model(join(path, "model"))
    fs = get_fs(spark)
    # The study is optional — present only if optimization was run before save.
    model.study = (
        load_pickled_from_parquet(join(path, "study"))
        if fs.exists(spark._jvm.org.apache.hadoop.fs.Path(join(path, "study")))
        else None
    )

    return model
122
+
123
+
124
def save_indexer(indexer: Indexer, path: Union[str, Path], overwrite: bool = False):
    """
    Save fitted indexer to disk as a folder

    :param indexer: Trained indexer
    :param path: destination where indexer files will be stored
    :param overwrite: if False, raise ``FileExistsError`` when ``path``
        already exists; if True, existing contents are overwritten
    :raises FileExistsError: if ``path`` exists and ``overwrite`` is False
    """
    if isinstance(path, Path):
        path = str(path)

    spark = State().session

    if not overwrite:
        fs = get_fs(spark)
        is_exists = fs.exists(spark._jvm.org.apache.hadoop.fs.Path(path))
        if is_exists:
            msg = f"Path '{path}' already exists. Mode is 'overwrite = False'."
            raise FileExistsError(msg)

    # Column dtypes are stored as their string form (e.g. "IntegerType()")
    # and reconstructed by `load_indexer`.
    init_args = indexer._init_args
    init_args["user_type"] = str(indexer.user_type)
    init_args["item_type"] = str(indexer.item_type)
    sc = spark.sparkContext
    df = spark.read.json(sc.parallelize([json.dumps(init_args)]))
    df.coalesce(1).write.mode("overwrite").json(join(path, "init_args.json"))

    # The four fitted Spark ML transformers are saved via their own writers.
    indexer.user_indexer.write().overwrite().save(join(path, "user_indexer"))
    indexer.item_indexer.write().overwrite().save(join(path, "item_indexer"))
    indexer.inv_user_indexer.write().overwrite().save(join(path, "inv_user_indexer"))
    indexer.inv_item_indexer.write().overwrite().save(join(path, "inv_item_indexer"))
154
+
155
+
156
def load_indexer(path: str) -> Indexer:
    """
    Load saved indexer from disk

    :param path: path to folder
    :return: restored Indexer
    """
    spark = State().session
    args = spark.read.json(join(path, "init_args.json")).first().asDict()

    # The type strings were written by `save_indexer`; they are restored
    # separately and must not reach the Indexer constructor.
    user_type = args["user_type"]
    del args["user_type"]
    item_type = args["item_type"]
    del args["item_type"]

    indexer = Indexer(**args)

    # Some pyspark versions render a DataType as e.g. "IntegerType()";
    # strip the call parentheses before resolving the class name in
    # pyspark.sql.types.
    # NOTE(review): only `user_type` is checked for the "()" suffix — this
    # assumes both type strings were produced by the same pyspark version;
    # confirm.
    if user_type.endswith("()"):
        user_type = user_type[:-2]
        item_type = item_type[:-2]
    user_type = getattr(st, user_type)
    item_type = getattr(st, item_type)
    indexer.user_type = user_type()
    indexer.item_type = item_type()

    indexer.user_indexer = StringIndexerModel.load(join(path, "user_indexer"))
    indexer.item_indexer = StringIndexerModel.load(join(path, "item_indexer"))
    indexer.inv_user_indexer = IndexToString.load(join(path, "inv_user_indexer"))
    indexer.inv_item_indexer = IndexToString.load(join(path, "inv_item_indexer"))

    return indexer
@@ -0,0 +1,44 @@
1
+ from typing import Optional
2
+
3
+ import torch
4
+
5
+ from replay.utils.session_handler import Borg, get_spark_session, logger_with_settings
6
+ from replay.utils.types import PYSPARK_AVAILABLE, MissingImport
7
+
8
+ if PYSPARK_AVAILABLE:
9
+ from pyspark.sql import SparkSession
10
+ else:
11
+ SparkSession = MissingImport
12
+
13
+
14
class State(Borg):
    """
    All modules look for Spark session via this class. You can put your own session here.

    Other parameters are stored here too: ``default device`` for ``pytorch`` (CPU/CUDA)
    """

    def __init__(
        self,
        session: Optional[SparkSession] = None,
        device: Optional[torch.device] = None,
    ):
        # Borg pattern: every instance shares one __dict__, so the
        # hasattr checks below act as "initialize once" guards.
        Borg.__init__(self)

        if not hasattr(self, "logger_set"):
            self.logger = logger_with_settings()
            self.logger_set = True

        # An explicitly passed session always replaces the shared one;
        # otherwise a session is created only on first use.
        if session is not None:
            self.session = session
        elif not hasattr(self, "session"):
            self.session = get_spark_session()

        # Same policy for the default torch device: explicit wins,
        # otherwise pick CUDA when available, falling back to CPU.
        if device is not None:
            self.device = device
        elif not hasattr(self, "device"):
            if torch.cuda.is_available():
                self.device = torch.device(f"cuda:{torch.cuda.current_device()}")
            else:
                self.device = torch.device("cpu")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: replay-rec
3
- Version: 0.20.3
3
+ Version: 0.20.3rc0
4
4
  Summary: RecSys Library
5
5
  License-Expression: Apache-2.0
6
6
  License-File: LICENSE
@@ -14,29 +14,23 @@ Classifier: Intended Audience :: Developers
14
14
  Classifier: Intended Audience :: Science/Research
15
15
  Classifier: Natural Language :: English
16
16
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
17
- Provides-Extra: spark
18
- Provides-Extra: torch
19
- Provides-Extra: torch-cpu
20
- Requires-Dist: lightning (<2.6.0) ; extra == "torch" or extra == "torch-cpu"
21
- Requires-Dist: lightning ; extra == "torch"
22
- Requires-Dist: lightning ; extra == "torch-cpu"
17
+ Requires-Dist: d3rlpy (>=2.8.1,<2.9)
18
+ Requires-Dist: implicit (>=0.7.2,<0.8)
19
+ Requires-Dist: lightautoml (>=0.4.1,<0.5)
20
+ Requires-Dist: lightning (>=2.0.2,<=2.4.0)
21
+ Requires-Dist: numba (>=0.50,<1)
23
22
  Requires-Dist: numpy (>=1.20.0,<2)
24
23
  Requires-Dist: pandas (>=1.3.5,<2.4.0)
25
24
  Requires-Dist: polars (<2.0)
26
- Requires-Dist: psutil (<=7.0.0) ; extra == "spark"
27
- Requires-Dist: psutil ; extra == "spark"
25
+ Requires-Dist: psutil (<=7.0.0)
28
26
  Requires-Dist: pyarrow (<22.0)
29
- Requires-Dist: pyspark (>=3.0,<3.5) ; extra == "spark"
30
- Requires-Dist: pyspark ; extra == "spark"
31
- Requires-Dist: pytorch-optimizer (>=3.8.0,<3.9.0) ; extra == "torch" or extra == "torch-cpu"
32
- Requires-Dist: pytorch-optimizer ; extra == "torch"
33
- Requires-Dist: pytorch-optimizer ; extra == "torch-cpu"
27
+ Requires-Dist: pyspark (>=3.0,<3.5)
28
+ Requires-Dist: pytorch-optimizer (>=3.8.0,<4)
29
+ Requires-Dist: sb-obp (>=0.5.10,<0.6)
34
30
  Requires-Dist: scikit-learn (>=1.6.1,<1.7.0)
35
31
  Requires-Dist: scipy (>=1.8.1,<2.0.0)
36
32
  Requires-Dist: setuptools
37
- Requires-Dist: torch (>=1.8,<2.9.0) ; extra == "torch" or extra == "torch-cpu"
38
- Requires-Dist: torch ; extra == "torch"
39
- Requires-Dist: torch ; extra == "torch-cpu"
33
+ Requires-Dist: torch (>=1.8,<2.9.0)
40
34
  Requires-Dist: tqdm (>=4.67,<5)
41
35
  Project-URL: Homepage, https://sb-ai-lab.github.io/RePlay/
42
36
  Project-URL: Repository, https://github.com/sb-ai-lab/RePlay
@@ -1,4 +1,4 @@
1
- replay/__init__.py,sha256=2Y-XWVf6G7EZYVo5MwU5aWfw38ZhbmBLHuki47tqPrI,225
1
+ replay/__init__.py,sha256=UO9-zAzQ0Zw6zeU2AwtbXyTUIsneXgvNK-oTutQNSEc,233
2
2
  replay/data/__init__.py,sha256=g5bKRyF76QL_BqlED-31RnS8pBdcyj9loMsx5vAG_0E,301
3
3
  replay/data/dataset.py,sha256=yQDc8lfphQYfHpm_T1MhnG8_GyM4ONyxJoFc1rUgdJ8,30755
4
4
  replay/data/dataset_utils/__init__.py,sha256=9wUvG8ZwGUvuzLU4zQI5FDcH0WVVo5YLN2ey3DterP0,55
@@ -11,6 +11,61 @@ replay/data/nn/torch_sequential_dataset.py,sha256=QSh4IM2vzAF095_ZMC1gMqZj9slHXo
11
11
  replay/data/nn/utils.py,sha256=Ic3G4yZRIzBYXLmwP1VstlZXPNR7AYGCc5EyZAERp5c,3297
12
12
  replay/data/schema.py,sha256=JmYLCrNgBS5oq4O_PT724Gr1pDurHEykcqV8Xaj0XTw,15922
13
13
  replay/data/spark_schema.py,sha256=4o0Kn_fjwz2-9dBY3q46F9PL0F3E7jdVpIlX7SG3OZI,1111
14
+ replay/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
+ replay/experimental/metrics/__init__.py,sha256=bdQogGbEDVAeH7Ejbb6vpw7bP6CYhftTu_DQuoFRuCA,2861
16
+ replay/experimental/metrics/base_metric.py,sha256=0ro9VoSnPtPAximnlcgmQaMNg9zoUN2AHAH_2WgfZiQ,22663
17
+ replay/experimental/metrics/coverage.py,sha256=UqYm-WtAlBFZ3kqv8PyLo4qqKiIXmR_CQFAl6H_YdqA,3150
18
+ replay/experimental/metrics/experiment.py,sha256=pD2Dyyg4PM1HjbrNrhAspZJP3B-i2So205qBChRGwwc,7337
19
+ replay/experimental/metrics/hitrate.py,sha256=TfWJrUyZXabdMr4tn8zqUPGDcYy2yphVCzXmLSHCxY0,675
20
+ replay/experimental/metrics/map.py,sha256=S4dKiMpYR0_pu0bqioGMT0kIC1s2aojFP4rddBqMPtM,921
21
+ replay/experimental/metrics/mrr.py,sha256=q6I1Cndlwr716mMuYtTMu0lN8Rrp9khxhb49OM2IpV8,530
22
+ replay/experimental/metrics/ncis_precision.py,sha256=yrErOhBZvZdNpQPx_AXyktDJatqdWRIHNMyei0QDJtQ,1088
23
+ replay/experimental/metrics/ndcg.py,sha256=q3KTsyZCrfvcpEjEnR_kWVB9ZaTFRxnoNRAr2WD0TrU,1538
24
+ replay/experimental/metrics/precision.py,sha256=U9pD9yRGeT8uH32BTyQ-W5qsAnbFWu-pqy4XfkcXfCM,664
25
+ replay/experimental/metrics/recall.py,sha256=5xRPGxfbVoDFEI5E6dVlZpT4RvnDlWzaktyoqh3a8mc,774
26
+ replay/experimental/metrics/rocauc.py,sha256=yq4vW2_bXO8HCjREBZVrHMKeZ054LYvjJmLJTXWPfQA,1675
27
+ replay/experimental/metrics/surprisal.py,sha256=CK4_zed2bSMDwC7ZBCS8d8RwGEqt8bh3w3fTpjKiK6Y,3052
28
+ replay/experimental/metrics/unexpectedness.py,sha256=JQQXEYHtQM8nqp7X2He4E9ZYwbpdENaK8oQG7sUQT3s,2621
29
+ replay/experimental/models/__init__.py,sha256=yeu0PAkqWNqNLDnUYpg0_vpkWT8tG8KmRMybodVFkZ4,1709
30
+ replay/experimental/models/admm_slim.py,sha256=dDg2c_5Lk8acykirtsv38Jg1l6kgAoBhRvPHPv5Vfis,8654
31
+ replay/experimental/models/base_neighbour_rec.py,sha256=Q2C4rle9FeVIncqgMuhLV6qZbPj2Bz8W_Ao8iQu31TU,7387
32
+ replay/experimental/models/base_rec.py,sha256=AmN6-PgIaNzD-sMIndMuRA3TJ0WZBbowCjaSTTgiYrY,54150
33
+ replay/experimental/models/base_torch_rec.py,sha256=mwbbsR-sQuQAFC1d8X2k0zP3iJeEP-X5nAaR3IV7Sqg,8105
34
+ replay/experimental/models/cql.py,sha256=ItTukqhH3V-PItVPawET9zO9tG4D8R4xKzz3tqKMjSc,19619
35
+ replay/experimental/models/ddpg.py,sha256=bzX4KvkuIecYA4bkFB1BnLKE3zqteujhpvsxAXEnKoM,32266
36
+ replay/experimental/models/dt4rec/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
37
+ replay/experimental/models/dt4rec/dt4rec.py,sha256=zcxn2MjrJg8eYqfGwfK80UjH2-uwNDg4PBbmQZz7Le0,5895
38
+ replay/experimental/models/dt4rec/gpt1.py,sha256=T3buFtYyF6Fh6sW6f9dUZFcFEnQdljItbRa22CiKb0w,14044
39
+ replay/experimental/models/dt4rec/trainer.py,sha256=YeaJ8mnoYZqnPwm1P9qOYb8GzgFC5At-JeSDcvG2V2o,3859
40
+ replay/experimental/models/dt4rec/utils.py,sha256=UF--cukjFB3uwzqaVHdCS3ik2qTtw97tzbSFGPkDfE8,8153
41
+ replay/experimental/models/extensions/spark_custom_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
42
+ replay/experimental/models/extensions/spark_custom_models/als_extension.py,sha256=R9Xj5Yew5eH3dMJ9qfIyhBg4NeMv-wXVEISgzCwFQz0,25846
43
+ replay/experimental/models/hierarchical_recommender.py,sha256=BqnEFBppKawt8Xx5lzBWk6qnmdCrZ7c2hpKj3mi1GvU,11441
44
+ replay/experimental/models/implicit_wrap.py,sha256=8F-f-CaStmlNHwphu-yu8o4Aft08NKDD_SqqH0zp1Uo,4655
45
+ replay/experimental/models/lightfm_wrap.py,sha256=rA9T2vGjrbt_GJV1XccYYsrs9qtgDtqVJCWBHFYrm4k,11329
46
+ replay/experimental/models/mult_vae.py,sha256=l-6g-2fIs80vxBl9VGY4FrJannAXrzsQOyGNuHU8tDs,11601
47
+ replay/experimental/models/neural_ts.py,sha256=oCqStgGg5CpGFAv1dC-3ODmK9nI05evzJ3XKBDQhgAo,42535
48
+ replay/experimental/models/neuromf.py,sha256=acC50kxYlctriNGqyOEkq57Iu4icUvZasyWFeRUJans,14386
49
+ replay/experimental/models/scala_als.py,sha256=6aMl8hUFR2J_nI5U8Z_-5BxfeATiWnC8zdj1C0AFbm4,10751
50
+ replay/experimental/models/u_lin_ucb.py,sha256=-gu6meOYeSwP6N8ILtwasWYj4Mbs6EJEFQXUHE8N_lY,3750
51
+ replay/experimental/nn/data/__init__.py,sha256=5EAF-FNd7xhkUpTq_5MyVcPXBD81mJCwYrcbhdGOWjE,48
52
+ replay/experimental/nn/data/schema_builder.py,sha256=nfE0-bVgYUwzyhNTTcXUWhfNBAZQLHWenM6-zEglqps,3301
53
+ replay/experimental/preprocessing/__init__.py,sha256=uMyeyQ_GKqjLhVGwhrEk3NLhhzS0DKi5xGo3VF4WkiA,130
54
+ replay/experimental/preprocessing/data_preparator.py,sha256=-yqWZT06iEYsY7rCSGRAgLcp6o7jvlsU431HspHQ2o4,35940
55
+ replay/experimental/preprocessing/padder.py,sha256=uxE6WlmYNd9kbACMEidxG1L19G5Rk0gQbvpN_TosMZ4,9558
56
+ replay/experimental/preprocessing/sequence_generator.py,sha256=vFtLkq9MuLGThPsa67103qlcMLYLfnAkR_HI1FXPwjw,9047
57
+ replay/experimental/scenarios/__init__.py,sha256=gWFLCkLyOmOppvbRMK7C3UMlMpcbIgiGVolSH6LPgWA,91
58
+ replay/experimental/scenarios/obp_wrapper/__init__.py,sha256=ZOJgpjRsmhXTpzGumk3AALKmstNBachtu_hOXUIPY5s,434
59
+ replay/experimental/scenarios/obp_wrapper/obp_optuna_objective.py,sha256=swwcot05a8GzIVhEKpfmjG16CuciItVuddPaOjCKo9o,2543
60
+ replay/experimental/scenarios/obp_wrapper/replay_offline.py,sha256=9ZP17steBiTh_KO37NnXWyN5LuPpABPhL_QG4JJHf7I,9622
61
+ replay/experimental/scenarios/obp_wrapper/utils.py,sha256=Uv_fqyJDt69vIdrw-Y9orLLzyHG0ko8svza0Hs_a87Q,3233
62
+ replay/experimental/scenarios/two_stages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
63
+ replay/experimental/scenarios/two_stages/reranker.py,sha256=NQhooA3OXLAh_PwydBNU2DGRRGPq2j2R0SSHtDM7hlg,4238
64
+ replay/experimental/scenarios/two_stages/two_stages_scenario.py,sha256=u41ymdhx0MS1I08VDjJ2UhXpSqsfTA1x9Hbz1tOaWLY,29822
65
+ replay/experimental/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
66
+ replay/experimental/utils/logger.py,sha256=UwLowaeOG17sDEe32LiZel8MnjSTzeW7J3uLG1iwLuA,639
67
+ replay/experimental/utils/model_handler.py,sha256=Rfj57E1R_XMEEigHNZa9a-rzEsyLWSDsgKfXoRzWWdg,6426
68
+ replay/experimental/utils/session_handler.py,sha256=H0C-Q2pqrs_5aDvoAkRMZuS5qu07uhu6g5FEL3NJiic,1305
14
69
  replay/metrics/__init__.py,sha256=j0PGvUehaPEZMNo9SQwJsnvzrS4bam9eHrRMQFLnMjY,2813
15
70
  replay/metrics/base_metric.py,sha256=ejtwFHktN4J8Fi1HIM3w0zlMAd8nO7-XpFi2D1iHXUQ,16010
16
71
  replay/metrics/categorical_diversity.py,sha256=3tp8n457Ob4gjM-UTB5N19u9WAF7fLDkWKk-Mth-Vzc,10769
@@ -131,8 +186,8 @@ replay/utils/session_handler.py,sha256=fQo2wseow8yuzKnEXT-aYAXcQIgRbTTXp0v7g1VVi
131
186
  replay/utils/spark_utils.py,sha256=GbRp-MuUoO3Pc4chFvlmo9FskSlRLeNlC3Go5pEJ6Ok,27411
132
187
  replay/utils/time.py,sha256=J8asoQBytPcNw-BLGADYIsKeWhIoN1H5hKiX9t2AMqo,9376
133
188
  replay/utils/types.py,sha256=rD9q9CqEXgF4yy512Hv2nXclvwcnfodOnhBZ1HSUI4c,1260
134
- replay_rec-0.20.3.dist-info/METADATA,sha256=AS8K7dngECYkEmit2-wSMhqWJpNL8ryJtjndQjctYhs,13562
135
- replay_rec-0.20.3.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
136
- replay_rec-0.20.3.dist-info/licenses/LICENSE,sha256=rPmcA7UrHxBChEAAlJyE24qUWKKl9yLQXxFsKeg_LX4,11344
137
- replay_rec-0.20.3.dist-info/licenses/NOTICE,sha256=k0bo4KHiHLRax5K3XKTTrf2Fi8V91mJ-R3FMdh6Reg0,2002
138
- replay_rec-0.20.3.dist-info/RECORD,,
189
+ replay_rec-0.20.3rc0.dist-info/METADATA,sha256=LCtP5r4PZP-jTKxRCFkk1mYq6qqUNS4rgjcDfmF3by8,13155
190
+ replay_rec-0.20.3rc0.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
191
+ replay_rec-0.20.3rc0.dist-info/licenses/LICENSE,sha256=rPmcA7UrHxBChEAAlJyE24qUWKKl9yLQXxFsKeg_LX4,11344
192
+ replay_rec-0.20.3rc0.dist-info/licenses/NOTICE,sha256=k0bo4KHiHLRax5K3XKTTrf2Fi8V91mJ-R3FMdh6Reg0,2002
193
+ replay_rec-0.20.3rc0.dist-info/RECORD,,