replay-rec 0.18.0__py3-none-any.whl → 0.18.0rc0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
Files changed (58)
  1. replay/__init__.py +1 -1
  2. replay/experimental/__init__.py +0 -0
  3. replay/experimental/metrics/__init__.py +62 -0
  4. replay/experimental/metrics/base_metric.py +602 -0
  5. replay/experimental/metrics/coverage.py +97 -0
  6. replay/experimental/metrics/experiment.py +175 -0
  7. replay/experimental/metrics/hitrate.py +26 -0
  8. replay/experimental/metrics/map.py +30 -0
  9. replay/experimental/metrics/mrr.py +18 -0
  10. replay/experimental/metrics/ncis_precision.py +31 -0
  11. replay/experimental/metrics/ndcg.py +49 -0
  12. replay/experimental/metrics/precision.py +22 -0
  13. replay/experimental/metrics/recall.py +25 -0
  14. replay/experimental/metrics/rocauc.py +49 -0
  15. replay/experimental/metrics/surprisal.py +90 -0
  16. replay/experimental/metrics/unexpectedness.py +76 -0
  17. replay/experimental/models/__init__.py +10 -0
  18. replay/experimental/models/admm_slim.py +205 -0
  19. replay/experimental/models/base_neighbour_rec.py +204 -0
  20. replay/experimental/models/base_rec.py +1271 -0
  21. replay/experimental/models/base_torch_rec.py +234 -0
  22. replay/experimental/models/cql.py +454 -0
  23. replay/experimental/models/ddpg.py +923 -0
  24. replay/experimental/models/dt4rec/__init__.py +0 -0
  25. replay/experimental/models/dt4rec/dt4rec.py +189 -0
  26. replay/experimental/models/dt4rec/gpt1.py +401 -0
  27. replay/experimental/models/dt4rec/trainer.py +127 -0
  28. replay/experimental/models/dt4rec/utils.py +265 -0
  29. replay/experimental/models/extensions/spark_custom_models/__init__.py +0 -0
  30. replay/experimental/models/extensions/spark_custom_models/als_extension.py +792 -0
  31. replay/experimental/models/implicit_wrap.py +131 -0
  32. replay/experimental/models/lightfm_wrap.py +302 -0
  33. replay/experimental/models/mult_vae.py +332 -0
  34. replay/experimental/models/neuromf.py +406 -0
  35. replay/experimental/models/scala_als.py +296 -0
  36. replay/experimental/nn/data/__init__.py +1 -0
  37. replay/experimental/nn/data/schema_builder.py +55 -0
  38. replay/experimental/preprocessing/__init__.py +3 -0
  39. replay/experimental/preprocessing/data_preparator.py +839 -0
  40. replay/experimental/preprocessing/padder.py +229 -0
  41. replay/experimental/preprocessing/sequence_generator.py +208 -0
  42. replay/experimental/scenarios/__init__.py +1 -0
  43. replay/experimental/scenarios/obp_wrapper/__init__.py +8 -0
  44. replay/experimental/scenarios/obp_wrapper/obp_optuna_objective.py +74 -0
  45. replay/experimental/scenarios/obp_wrapper/replay_offline.py +248 -0
  46. replay/experimental/scenarios/obp_wrapper/utils.py +87 -0
  47. replay/experimental/scenarios/two_stages/__init__.py +0 -0
  48. replay/experimental/scenarios/two_stages/reranker.py +117 -0
  49. replay/experimental/scenarios/two_stages/two_stages_scenario.py +757 -0
  50. replay/experimental/utils/__init__.py +0 -0
  51. replay/experimental/utils/logger.py +24 -0
  52. replay/experimental/utils/model_handler.py +186 -0
  53. replay/experimental/utils/session_handler.py +44 -0
  54. {replay_rec-0.18.0.dist-info → replay_rec-0.18.0rc0.dist-info}/METADATA +11 -3
  55. replay_rec-0.18.0rc0.dist-info/NOTICE +41 -0
  56. {replay_rec-0.18.0.dist-info → replay_rec-0.18.0rc0.dist-info}/RECORD +58 -5
  57. {replay_rec-0.18.0.dist-info → replay_rec-0.18.0rc0.dist-info}/WHEEL +1 -1
  58. {replay_rec-0.18.0.dist-info → replay_rec-0.18.0rc0.dist-info}/LICENSE +0 -0
replay/experimental/utils/logger.py
@@ -0,0 +1,24 @@
+ import logging
+
+
+ def get_logger(
+     name,
+     level=logging.INFO,
+     format_str="%(asctime)s [%(pathname)s:%(lineno)s - %(levelname)s ] %(message)s",
+     date_format="%Y-%m-%d %H:%M:%S",
+     file=False,
+ ):
+     """
+     Get python logger instance
+     """
+     logger = logging.getLogger(name)
+     logger.setLevel(level)
+
+     if not logger.hasHandlers():
+         handler = logging.StreamHandler() if not file else logging.FileHandler(name)
+         handler.setLevel(level)
+         formatter = logging.Formatter(fmt=format_str, datefmt=date_format)
+         handler.setFormatter(formatter)
+         logger.addHandler(handler)
+
+     return logger
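
For reference, a minimal usage sketch of the helper added above (the import path follows the file list; the sketch itself is not part of the diff):

import logging

from replay.experimental.utils.logger import get_logger

# The first call attaches a StreamHandler; later calls reuse the same logger,
# since logging.getLogger returns a cached instance and hasHandlers() is then True.
log = get_logger("replay-demo", level=logging.DEBUG)
log.info("logger ready")

# With file=True the logger name doubles as the log file path,
# so pick a filesystem-friendly name.
file_log = get_logger("replay-demo.log", file=True)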
replay/experimental/utils/model_handler.py
@@ -0,0 +1,186 @@
+ import json
+ from inspect import getfullargspec
+ from os.path import join
+ from pathlib import Path
+ from typing import Union
+
+ from replay.experimental.models.base_rec import BaseRecommender
+ from replay.experimental.preprocessing import Indexer
+ from replay.utils import PYSPARK_AVAILABLE
+ from replay.utils.session_handler import State
+ from replay.utils.spark_utils import load_pickled_from_parquet, save_picklable_to_parquet
+
+ if PYSPARK_AVAILABLE:
+     import pyspark.sql.types as st
+     from pyspark.ml.feature import IndexToString, StringIndexerModel
+     from pyspark.sql import SparkSession
+
+     from replay.utils.model_handler import get_fs
+
+     def get_list_of_paths(spark: SparkSession, dir_path: str):
+         """
+         Returns list of paths to files in the `dir_path`
+
+         :param spark: spark session
+         :param dir_path: path to dir in hdfs or local disk
+         :return: list of paths to files
+         """
+         fs = get_fs(spark)
+         statuses = fs.listStatus(spark._jvm.org.apache.hadoop.fs.Path(dir_path))
+         return [str(f.getPath()) for f in statuses]
+
+
+ def save(model: BaseRecommender, path: Union[str, Path], overwrite: bool = False):
+     """
+     Save fitted model to disk as a folder
+
+     :param model: Trained recommender
+     :param path: destination where model files will be stored
+     :return:
+     """
+     if isinstance(path, Path):
+         path = str(path)
+
+     spark = State().session
+
+     fs = get_fs(spark)
+     if not overwrite:
+         is_exists = fs.exists(spark._jvm.org.apache.hadoop.fs.Path(path))
+         if is_exists:
+             msg = f"Path '{path}' already exists. Mode is 'overwrite = False'."
+             raise FileExistsError(msg)
+
+     fs.mkdirs(spark._jvm.org.apache.hadoop.fs.Path(path))
+     model._save_model(join(path, "model"))
+
+     init_args = model._init_args
+     init_args["_model_name"] = str(model)
+     sc = spark.sparkContext
+     df = spark.read.json(sc.parallelize([json.dumps(init_args)]))
+     df.coalesce(1).write.mode("overwrite").option("ignoreNullFields", "false").json(join(path, "init_args.json"))
+
+     dataframes = model._dataframes
+     df_path = join(path, "dataframes")
+     for name, df in dataframes.items():
+         if df is not None:
+             df.write.mode("overwrite").parquet(join(df_path, name))
+
+     if hasattr(model, "fit_users"):
+         model.fit_users.write.mode("overwrite").parquet(join(df_path, "fit_users"))
+     if hasattr(model, "fit_items"):
+         model.fit_items.write.mode("overwrite").parquet(join(df_path, "fit_items"))
+     if hasattr(model, "study"):
+         save_picklable_to_parquet(model.study, join(path, "study"))
+
+
+ def load(path: str, model_type=None) -> BaseRecommender:
+     """
+     Load saved model from disk
+
+     :param path: path to model folder
+     :return: Restored trained model
+     """
+     spark = State().session
+     args = spark.read.json(join(path, "init_args.json")).first().asDict(recursive=True)
+     name = args["_model_name"]
+     del args["_model_name"]
+
+     model_class = model_type if model_type is not None else globals()[name]
+     if name == "CQL":
+         for a in args:
+             if isinstance(args[a], dict) and "type" in args[a] and args[a]["type"] == "none":
+                 args[a]["params"] = {}
+     init_args = getfullargspec(model_class.__init__).args
+     init_args.remove("self")
+     extra_args = set(args) - set(init_args)
+     if len(extra_args) > 0:
+         extra_args = {key: args[key] for key in args}
+         init_args = {key: args[key] for key in init_args}
+     else:
+         init_args = args
+         extra_args = {}
+
+     model = model_class(**init_args)
+     for arg in extra_args:
+         model.arg = extra_args[arg]
+
+     dataframes_paths = get_list_of_paths(spark, join(path, "dataframes"))
+     for dataframe_path in dataframes_paths:
+         df = spark.read.parquet(dataframe_path)
+         attr_name = dataframe_path.split("/")[-1]
+         setattr(model, attr_name, df)
+
+     model._load_model(join(path, "model"))
+     fs = get_fs(spark)
+     model.study = (
+         load_pickled_from_parquet(join(path, "study"))
+         if fs.exists(spark._jvm.org.apache.hadoop.fs.Path(join(path, "study")))
+         else None
+     )
+
+     return model
+
+
+ def save_indexer(indexer: Indexer, path: Union[str, Path], overwrite: bool = False):
+     """
+     Save fitted indexer to disk as a folder
+
+     :param indexer: Trained indexer
+     :param path: destination where indexer files will be stored
+     """
+     if isinstance(path, Path):
+         path = str(path)
+
+     spark = State().session
+
+     if not overwrite:
+         fs = get_fs(spark)
+         is_exists = fs.exists(spark._jvm.org.apache.hadoop.fs.Path(path))
+         if is_exists:
+             msg = f"Path '{path}' already exists. Mode is 'overwrite = False'."
+             raise FileExistsError(msg)
+
+     init_args = indexer._init_args
+     init_args["user_type"] = str(indexer.user_type)
+     init_args["item_type"] = str(indexer.item_type)
+     sc = spark.sparkContext
+     df = spark.read.json(sc.parallelize([json.dumps(init_args)]))
+     df.coalesce(1).write.mode("overwrite").json(join(path, "init_args.json"))
+
+     indexer.user_indexer.write().overwrite().save(join(path, "user_indexer"))
+     indexer.item_indexer.write().overwrite().save(join(path, "item_indexer"))
+     indexer.inv_user_indexer.write().overwrite().save(join(path, "inv_user_indexer"))
+     indexer.inv_item_indexer.write().overwrite().save(join(path, "inv_item_indexer"))
+
+
+ def load_indexer(path: str) -> Indexer:
+     """
+     Load saved indexer from disk
+
+     :param path: path to folder
+     :return: restored Indexer
+     """
+     spark = State().session
+     args = spark.read.json(join(path, "init_args.json")).first().asDict()
+
+     user_type = args["user_type"]
+     del args["user_type"]
+     item_type = args["item_type"]
+     del args["item_type"]
+
+     indexer = Indexer(**args)
+
+     if user_type.endswith("()"):
+         user_type = user_type[:-2]
+         item_type = item_type[:-2]
+     user_type = getattr(st, user_type)
+     item_type = getattr(st, item_type)
+     indexer.user_type = user_type()
+     indexer.item_type = item_type()
+
+     indexer.user_indexer = StringIndexerModel.load(join(path, "user_indexer"))
+     indexer.item_indexer = StringIndexerModel.load(join(path, "item_indexer"))
+     indexer.inv_user_indexer = IndexToString.load(join(path, "inv_user_indexer"))
+     indexer.inv_item_indexer = IndexToString.load(join(path, "inv_item_indexer"))
+
+     return indexer
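
A hedged round-trip sketch for the save/load helpers above; `model` is assumed to be an already fitted BaseRecommender subclass, and the destination path is illustrative:

from replay.experimental.utils.model_handler import load, save

# Assumes an active Spark session reachable through State() and a fitted
# experimental recommender in `model`.
save(model, "/tmp/replay_model", overwrite=True)  # with overwrite=False an existing path raises FileExistsError

# load() resolves the class from the "_model_name" stored in init_args.json via
# this module's globals(); pass model_type explicitly for classes not visible there.
restored = load("/tmp/replay_model")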
replay/experimental/utils/session_handler.py
@@ -0,0 +1,44 @@
+ from typing import Optional
+
+ import torch
+
+ from replay.utils.session_handler import Borg, get_spark_session, logger_with_settings
+ from replay.utils.types import PYSPARK_AVAILABLE, MissingImportType
+
+ if PYSPARK_AVAILABLE:
+     from pyspark.sql import SparkSession
+ else:
+     SparkSession = MissingImportType
+
+
+ class State(Borg):
+     """
+     All modules look for Spark session via this class. You can put your own session here.
+
+     Other parameters are stored here too: ``default device`` for ``pytorch`` (CPU/CUDA)
+     """
+
+     def __init__(
+         self,
+         session: Optional[SparkSession] = None,
+         device: Optional[torch.device] = None,
+     ):
+         Borg.__init__(self)
+         if not hasattr(self, "logger_set"):
+             self.logger = logger_with_settings()
+             self.logger_set = True
+
+         if session is None:
+             if not hasattr(self, "session"):
+                 self.session = get_spark_session()
+         else:
+             self.session = session
+
+         if device is None:
+             if not hasattr(self, "device"):
+                 if torch.cuda.is_available():
+                     self.device = torch.device(f"cuda:{torch.cuda.current_device()}")
+                 else:
+                     self.device = torch.device("cpu")
+         else:
+             self.device = device
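
State subclasses Borg, the classic shared-state pattern, so every instance is expected to see the same session and device (inferred from the class name and the hasattr guards above). A short sketch, runnable where pyspark and torch are installed:

import torch

from replay.experimental.utils.session_handler import State

# Configure once; later State() instances share the same attributes.
State(device=torch.device("cpu"))

same = State()
assert same.device == torch.device("cpu")
spark = same.session  # created by get_spark_session() during the first State() call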
{replay_rec-0.18.0.dist-info → replay_rec-0.18.0rc0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: replay-rec
- Version: 0.18.0
+ Version: 0.18.0rc0
  Summary: RecSys Library
  Home-page: https://sb-ai-lab.github.io/RePlay/
  License: Apache-2.0
@@ -21,18 +21,26 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
  Provides-Extra: all
  Provides-Extra: spark
  Provides-Extra: torch
+ Requires-Dist: d3rlpy (>=2.0.4,<3.0.0)
  Requires-Dist: fixed-install-nmslib (==2.1.2)
+ Requires-Dist: gym (>=0.26.0,<0.27.0)
  Requires-Dist: hnswlib (>=0.7.0,<0.8.0)
+ Requires-Dist: implicit (>=0.7.0,<0.8.0)
+ Requires-Dist: lightautoml (>=0.3.1,<0.4.0)
+ Requires-Dist: lightfm (==1.17)
  Requires-Dist: lightning (>=2.0.2,<=2.4.0) ; extra == "torch" or extra == "all"
+ Requires-Dist: llvmlite (>=0.32.1)
+ Requires-Dist: numba (>=0.50)
  Requires-Dist: numpy (>=1.20.0)
  Requires-Dist: optuna (>=3.2.0,<3.3.0)
  Requires-Dist: pandas (>=1.3.5,<=2.2.2)
  Requires-Dist: polars (>=1.0.0,<1.1.0)
  Requires-Dist: psutil (>=6.0.0,<6.1.0)
  Requires-Dist: pyarrow (>=12.0.1)
- Requires-Dist: pyspark (>=3.0,<3.6) ; (python_full_version >= "3.8.1" and python_version < "3.11") and (extra == "spark" or extra == "all")
- Requires-Dist: pyspark (>=3.4,<3.6) ; (python_version >= "3.11" and python_version < "3.12") and (extra == "spark" or extra == "all")
+ Requires-Dist: pyspark (>=3.0,<3.5) ; (python_full_version >= "3.8.1" and python_version < "3.11") and (extra == "spark" or extra == "all")
+ Requires-Dist: pyspark (>=3.4,<3.5) ; (python_version >= "3.11" and python_version < "3.12") and (extra == "spark" or extra == "all")
  Requires-Dist: pytorch-ranger (>=0.1.1,<0.2.0) ; extra == "torch" or extra == "all"
+ Requires-Dist: sb-obp (>=0.5.8,<0.6.0)
  Requires-Dist: scikit-learn (>=1.0.2,<2.0.0)
  Requires-Dist: scipy (>=1.8.1,<2.0.0)
  Requires-Dist: torch (>=1.8,<=2.4.0) ; extra == "torch" or extra == "all"
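
The tightened pyspark pins are gated by PEP 508 environment markers. A small sketch with the third-party packaging library (not a replay-rec dependency; used here only to show how one of the new markers resolves):

from packaging.requirements import Requirement

req = Requirement(
    'pyspark (>=3.4,<3.5) ; (python_version >= "3.11" and python_version < "3.12") '
    'and (extra == "spark" or extra == "all")'
)

# The pin applies only when the "spark" or "all" extra is requested on
# Python 3.11; in any other environment pip skips the requirement.
print(req.marker.evaluate({"python_version": "3.11", "extra": "spark"}))   # True
print(req.marker.evaluate({"python_version": "3.10", "extra": "spark"}))  # False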
replay_rec-0.18.0rc0.dist-info/NOTICE
@@ -0,0 +1,41 @@
+ Apache Spark
+ Copyright 2014 and onwards The Apache Software Foundation.
+
+ This product includes software developed at
+ The Apache Software Foundation (http://www.apache.org/).
+
+
+ Export Control Notice
+ ---------------------
+
+ This distribution includes cryptographic software. The country in which you currently reside may have
+ restrictions on the import, possession, use, and/or re-export to another country, of encryption software.
+ BEFORE using any encryption software, please check your country's laws, regulations and policies concerning
+ the import, possession, or use, and re-export of encryption software, to see if this is permitted. See
+ <http://www.wassenaar.org/> for more information.
+
+ The U.S. Government Department of Commerce, Bureau of Industry and Security (BIS), has classified this
+ software as Export Commodity Control Number (ECCN) 5D002.C.1, which includes information security software
+ using or performing cryptographic functions with asymmetric algorithms. The form and manner of this Apache
+ Software Foundation distribution makes it eligible for export under the License Exception ENC Technology
+ Software Unrestricted (TSU) exception (see the BIS Export Administration Regulations, Section 740.13) for
+ both object code and source code.
+
+ The following provides more details on the included cryptographic software:
+
+ This software uses Apache Commons Crypto (https://commons.apache.org/proper/commons-crypto/) to
+ support authentication, and encryption and decryption of data sent across the network between
+ services.
+
+
+ Metrics
+ Copyright 2010-2013 Coda Hale and Yammer, Inc.
+
+ This product includes software developed by Coda Hale and Yammer, Inc.
+
+ This product includes code derived from the JSR-166 project (ThreadLocalRandom, Striped64,
+ LongAdder), which was released with the following comments:
+
+ Written by Doug Lea with assistance from members of JCP JSR-166
+ Expert Group and released to the public domain, as explained at
+ http://creativecommons.org/publicdomain/zero/1.0/
{replay_rec-0.18.0.dist-info → replay_rec-0.18.0rc0.dist-info}/RECORD
@@ -1,4 +1,4 @@
- replay/__init__.py,sha256=CfUvW1hW04UHwi_dUOzcLqfgex6e2fWHaHTCl30i8LQ,47
+ replay/__init__.py,sha256=8QXsQRY27Ie9xmwimwzqKYG4KTLnxtZW0ns89LKKtUU,55
  replay/data/__init__.py,sha256=g5bKRyF76QL_BqlED-31RnS8pBdcyj9loMsx5vAG_0E,301
  replay/data/dataset.py,sha256=FnvsFeIcCMlq94_NDQRY3-jgpVvKN-4FdivABWVr8Pk,29481
  replay/data/dataset_utils/__init__.py,sha256=9wUvG8ZwGUvuzLU4zQI5FDcH0WVVo5YLN2ey3DterP0,55
@@ -11,6 +11,58 @@ replay/data/nn/torch_sequential_dataset.py,sha256=BqrK_PtkhpsaY1zRIWGk4EgwPL31a7
  replay/data/nn/utils.py,sha256=YKE9gkIHZDDiwv4THqOWL4PzsdOujnPuM97v79Mwq0E,2769
  replay/data/schema.py,sha256=F_cv6sYb6l23yuX5xWnbqoJ9oSeUT2NpIM19u8Lf2jA,15606
  replay/data/spark_schema.py,sha256=4o0Kn_fjwz2-9dBY3q46F9PL0F3E7jdVpIlX7SG3OZI,1111
+ replay/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ replay/experimental/metrics/__init__.py,sha256=bdQogGbEDVAeH7Ejbb6vpw7bP6CYhftTu_DQuoFRuCA,2861
+ replay/experimental/metrics/base_metric.py,sha256=mWbkRGdHTF3ZHq9WSqTGGAX2XJtOSzwcefjSu1Mdl0Y,22649
+ replay/experimental/metrics/coverage.py,sha256=3kVBAUhIEOuD8aJ6DShH2xh_1F61dcLZb001VCkmeJk,3154
+ replay/experimental/metrics/experiment.py,sha256=Bd_XB9zbngcAwf5JLZKVPsFWQoz9pEGlPEUbkiR_MDc,7343
+ replay/experimental/metrics/hitrate.py,sha256=TfWJrUyZXabdMr4tn8zqUPGDcYy2yphVCzXmLSHCxY0,675
+ replay/experimental/metrics/map.py,sha256=S4dKiMpYR0_pu0bqioGMT0kIC1s2aojFP4rddBqMPtM,921
+ replay/experimental/metrics/mrr.py,sha256=q6I1Cndlwr716mMuYtTMu0lN8Rrp9khxhb49OM2IpV8,530
+ replay/experimental/metrics/ncis_precision.py,sha256=yrErOhBZvZdNpQPx_AXyktDJatqdWRIHNMyei0QDJtQ,1088
+ replay/experimental/metrics/ndcg.py,sha256=q3KTsyZCrfvcpEjEnR_kWVB9ZaTFRxnoNRAr2WD0TrU,1538
+ replay/experimental/metrics/precision.py,sha256=U9pD9yRGeT8uH32BTyQ-W5qsAnbFWu-pqy4XfkcXfCM,664
+ replay/experimental/metrics/recall.py,sha256=5xRPGxfbVoDFEI5E6dVlZpT4RvnDlWzaktyoqh3a8mc,774
+ replay/experimental/metrics/rocauc.py,sha256=yq4vW2_bXO8HCjREBZVrHMKeZ054LYvjJmLJTXWPfQA,1675
+ replay/experimental/metrics/surprisal.py,sha256=CK4_zed2bSMDwC7ZBCS8d8RwGEqt8bh3w3fTpjKiK6Y,3052
+ replay/experimental/metrics/unexpectedness.py,sha256=JQQXEYHtQM8nqp7X2He4E9ZYwbpdENaK8oQG7sUQT3s,2621
+ replay/experimental/models/__init__.py,sha256=R284PXgSxt-JWWwlSTLggchash0hrLfy4b2w-ySaQf4,588
+ replay/experimental/models/admm_slim.py,sha256=Oz-x0aQAnGFN9z7PB7MiKfduBasc4KQrBT0JwtYdwLY,6581
+ replay/experimental/models/base_neighbour_rec.py,sha256=pRcffr0cdRNZRVpzWb2Qv-UIsLkhbs7K1GRAmrSqPSM,7506
+ replay/experimental/models/base_rec.py,sha256=eTHQdjEaS_5e-8y7xB6tHlSObD0cbD66_NfFZJK2NxU,49571
+ replay/experimental/models/base_torch_rec.py,sha256=oDkCxVFQjIHSWKlCns6mU3ECWbQW3mQZWvBHBxJQdwc,8111
+ replay/experimental/models/cql.py,sha256=3IBQEqWfyHmvGxCvWtIbLgjuRWfd_8mySg8bVaI4KHQ,19630
+ replay/experimental/models/ddpg.py,sha256=uqWk235-YZ2na-NPN4TxUM9ZhogpLZEjivt1oSC2rtI,32080
+ replay/experimental/models/dt4rec/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ replay/experimental/models/dt4rec/dt4rec.py,sha256=ZIHYonDubStN7Gb703csy86R7Q3_1fZc4zJf98HYFe4,5895
+ replay/experimental/models/dt4rec/gpt1.py,sha256=T3buFtYyF6Fh6sW6f9dUZFcFEnQdljItbRa22CiKb0w,14044
+ replay/experimental/models/dt4rec/trainer.py,sha256=YeaJ8mnoYZqnPwm1P9qOYb8GzgFC5At-JeSDcvG2V2o,3859
+ replay/experimental/models/dt4rec/utils.py,sha256=jbCx2Xc85VtjQx-caYhJFfVuj1Wf866OAiSoZlR4q48,8201
+ replay/experimental/models/extensions/spark_custom_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ replay/experimental/models/extensions/spark_custom_models/als_extension.py,sha256=dKSVCMXWRB7IUnpEK_QNhSEuUSVcG793E8MT_AGXneY,25890
+ replay/experimental/models/implicit_wrap.py,sha256=8F-f-CaStmlNHwphu-yu8o4Aft08NKDD_SqqH0zp1Uo,4655
+ replay/experimental/models/lightfm_wrap.py,sha256=8nuTpiBuddKlMFFpbUpRt5k_JiBGRjPpF_hNbKdLP4Q,11327
+ replay/experimental/models/mult_vae.py,sha256=BnnlUHPlNuvh7EFA8bjITRW_m8JQANRD6zvsNQ1SUXM,11608
+ replay/experimental/models/neuromf.py,sha256=Hr9qEKv1shkwAqCVCxfews1Pk3F6yni2WIZUGS2tNCE,14392
+ replay/experimental/models/scala_als.py,sha256=-sMZ8P_XbmVi-hApuS46MpaosVIXRED05cgsOI3ojvQ,10975
+ replay/experimental/nn/data/__init__.py,sha256=5EAF-FNd7xhkUpTq_5MyVcPXBD81mJCwYrcbhdGOWjE,48
+ replay/experimental/nn/data/schema_builder.py,sha256=5PphL9kK-tVm30nWdTjHUzqVOnTwKiU_MlxGdL5HJ8Y,1736
+ replay/experimental/preprocessing/__init__.py,sha256=uMyeyQ_GKqjLhVGwhrEk3NLhhzS0DKi5xGo3VF4WkiA,130
+ replay/experimental/preprocessing/data_preparator.py,sha256=SLyk4HWurLmUHuev5L_GmI3oVU-58lCflOExHJ7zCGw,35964
+ replay/experimental/preprocessing/padder.py,sha256=ROKnGA0136C9W9Qkky-1V5klcMxvwos5KL4_jMLOgwY,9564
+ replay/experimental/preprocessing/sequence_generator.py,sha256=E1_0uZJLv8V_n6YzRlgUWtcrHIdjNwPeBN-BMbz0e-A,9053
+ replay/experimental/scenarios/__init__.py,sha256=gWFLCkLyOmOppvbRMK7C3UMlMpcbIgiGVolSH6LPgWA,91
+ replay/experimental/scenarios/obp_wrapper/__init__.py,sha256=rsRyfsTnVNp20LkTEugwoBrV9XWbIhR8tOqec_Au6dY,450
+ replay/experimental/scenarios/obp_wrapper/obp_optuna_objective.py,sha256=vmLANYB5i1UR3uY7e-T0IBEYwPxOYHtKqhkmUvMUYhU,2548
+ replay/experimental/scenarios/obp_wrapper/replay_offline.py,sha256=A6TPBFHj_UUL0N6DHSF0-hACsH5cw2o1GMYvpPS6964,8756
+ replay/experimental/scenarios/obp_wrapper/utils.py,sha256=-ioWTb73NmHWxVxw4BdSolctqeeGIyjKtydwc45nrrk,3271
+ replay/experimental/scenarios/two_stages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ replay/experimental/scenarios/two_stages/reranker.py,sha256=tJtWhbHRNV4sJZ9RZzqIfylTplKh9QVwTIBhEGGnXq8,4244
+ replay/experimental/scenarios/two_stages/two_stages_scenario.py,sha256=frwsST85YGMGEZPf4DZFp3kPKPEcVgaxOCEdtZywpkw,29841
+ replay/experimental/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ replay/experimental/utils/logger.py,sha256=UwLowaeOG17sDEe32LiZel8MnjSTzeW7J3uLG1iwLuA,639
+ replay/experimental/utils/model_handler.py,sha256=Rfj57E1R_XMEEigHNZa9a-rzEsyLWSDsgKfXoRzWWdg,6426
+ replay/experimental/utils/session_handler.py,sha256=076TLpTOcnh13BznNTtJW6Zhrqvm9Ee1mlpP5YMD4No,1313
  replay/metrics/__init__.py,sha256=j0PGvUehaPEZMNo9SQwJsnvzrS4bam9eHrRMQFLnMjY,2813
  replay/metrics/base_metric.py,sha256=uleW5vLrdA3iRx72tFyW0cxe6ne_ugQ1XaY_ZTcnAOo,15960
  replay/metrics/categorical_diversity.py,sha256=OYsF-Ng-WrF9CC-sKgQKngrA779NO8MtgRvvAyC8MXM,10781
@@ -121,7 +173,8 @@ replay/utils/session_handler.py,sha256=RYzQvvOnukundccEBnH4ghEdyUgiGB9etz5e3Elvf
  replay/utils/spark_utils.py,sha256=LBzS8PJc6Mq8q7S_f6BbQZkeOEW49briAdp--pwFWbs,26891
  replay/utils/time.py,sha256=J8asoQBytPcNw-BLGADYIsKeWhIoN1H5hKiX9t2AMqo,9376
  replay/utils/types.py,sha256=5sw0A7NG4ZgQKdWORnBy0wBZ5F98sP_Ju8SKQ6zbDS4,651
- replay_rec-0.18.0.dist-info/LICENSE,sha256=rPmcA7UrHxBChEAAlJyE24qUWKKl9yLQXxFsKeg_LX4,11344
- replay_rec-0.18.0.dist-info/METADATA,sha256=GqTiw9bPlIkXG9mwL0a_2V8EuPsImj9oZzvonqiR0Ug,10863
- replay_rec-0.18.0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- replay_rec-0.18.0.dist-info/RECORD,,
+ replay_rec-0.18.0rc0.dist-info/LICENSE,sha256=rPmcA7UrHxBChEAAlJyE24qUWKKl9yLQXxFsKeg_LX4,11344
+ replay_rec-0.18.0rc0.dist-info/METADATA,sha256=u_aqIEAypmp3QkU8Jgt0knTsyUbgxcF2lBGz2evOdIg,11164
+ replay_rec-0.18.0rc0.dist-info/NOTICE,sha256=k0bo4KHiHLRax5K3XKTTrf2Fi8V91mJ-R3FMdh6Reg0,2002
+ replay_rec-0.18.0rc0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ replay_rec-0.18.0rc0.dist-info/RECORD,,
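
Each RECORD row is `path,hash,size`, where the hash field is the urlsafe base64 of the file's sha256 digest with padding stripped (per PEP 376/427). A minimal sketch for recomputing it:

import base64
import hashlib

def record_hash(data: bytes) -> str:
    # Rebuild the hash field of a wheel RECORD row for the given file bytes.
    digest = hashlib.sha256(data).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# e.g. checking the updated replay/__init__.py entry above (55 bytes, new hash):
# with open("replay/__init__.py", "rb") as fh:
#     assert record_hash(fh.read()) == "sha256=8QXsQRY27Ie9xmwimwzqKYG4KTLnxtZW0ns89LKKtUU"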
{replay_rec-0.18.0.dist-info → replay_rec-0.18.0rc0.dist-info}/WHEEL
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.6.1
+ Generator: poetry-core 1.9.0
  Root-Is-Purelib: true
  Tag: py3-none-any