replay-rec 0.20.0__py3-none-any.whl → 0.20.0rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (128)
  1. replay/__init__.py +1 -1
  2. replay/data/dataset.py +10 -9
  3. replay/data/dataset_utils/dataset_label_encoder.py +5 -4
  4. replay/data/nn/schema.py +9 -18
  5. replay/data/nn/sequence_tokenizer.py +16 -15
  6. replay/data/nn/sequential_dataset.py +4 -4
  7. replay/data/nn/torch_sequential_dataset.py +5 -4
  8. replay/data/nn/utils.py +2 -1
  9. replay/data/schema.py +3 -12
  10. replay/experimental/__init__.py +0 -0
  11. replay/experimental/metrics/__init__.py +62 -0
  12. replay/experimental/metrics/base_metric.py +603 -0
  13. replay/experimental/metrics/coverage.py +97 -0
  14. replay/experimental/metrics/experiment.py +175 -0
  15. replay/experimental/metrics/hitrate.py +26 -0
  16. replay/experimental/metrics/map.py +30 -0
  17. replay/experimental/metrics/mrr.py +18 -0
  18. replay/experimental/metrics/ncis_precision.py +31 -0
  19. replay/experimental/metrics/ndcg.py +49 -0
  20. replay/experimental/metrics/precision.py +22 -0
  21. replay/experimental/metrics/recall.py +25 -0
  22. replay/experimental/metrics/rocauc.py +49 -0
  23. replay/experimental/metrics/surprisal.py +90 -0
  24. replay/experimental/metrics/unexpectedness.py +76 -0
  25. replay/experimental/models/__init__.py +50 -0
  26. replay/experimental/models/admm_slim.py +257 -0
  27. replay/experimental/models/base_neighbour_rec.py +200 -0
  28. replay/experimental/models/base_rec.py +1386 -0
  29. replay/experimental/models/base_torch_rec.py +234 -0
  30. replay/experimental/models/cql.py +454 -0
  31. replay/experimental/models/ddpg.py +932 -0
  32. replay/experimental/models/dt4rec/__init__.py +0 -0
  33. replay/experimental/models/dt4rec/dt4rec.py +189 -0
  34. replay/experimental/models/dt4rec/gpt1.py +401 -0
  35. replay/experimental/models/dt4rec/trainer.py +127 -0
  36. replay/experimental/models/dt4rec/utils.py +264 -0
  37. replay/experimental/models/extensions/spark_custom_models/__init__.py +0 -0
  38. replay/experimental/models/extensions/spark_custom_models/als_extension.py +792 -0
  39. replay/experimental/models/hierarchical_recommender.py +331 -0
  40. replay/experimental/models/implicit_wrap.py +131 -0
  41. replay/experimental/models/lightfm_wrap.py +303 -0
  42. replay/experimental/models/mult_vae.py +332 -0
  43. replay/experimental/models/neural_ts.py +986 -0
  44. replay/experimental/models/neuromf.py +406 -0
  45. replay/experimental/models/scala_als.py +293 -0
  46. replay/experimental/models/u_lin_ucb.py +115 -0
  47. replay/experimental/nn/data/__init__.py +1 -0
  48. replay/experimental/nn/data/schema_builder.py +102 -0
  49. replay/experimental/preprocessing/__init__.py +3 -0
  50. replay/experimental/preprocessing/data_preparator.py +839 -0
  51. replay/experimental/preprocessing/padder.py +229 -0
  52. replay/experimental/preprocessing/sequence_generator.py +208 -0
  53. replay/experimental/scenarios/__init__.py +1 -0
  54. replay/experimental/scenarios/obp_wrapper/__init__.py +8 -0
  55. replay/experimental/scenarios/obp_wrapper/obp_optuna_objective.py +74 -0
  56. replay/experimental/scenarios/obp_wrapper/replay_offline.py +261 -0
  57. replay/experimental/scenarios/obp_wrapper/utils.py +85 -0
  58. replay/experimental/scenarios/two_stages/__init__.py +0 -0
  59. replay/experimental/scenarios/two_stages/reranker.py +117 -0
  60. replay/experimental/scenarios/two_stages/two_stages_scenario.py +757 -0
  61. replay/experimental/utils/__init__.py +0 -0
  62. replay/experimental/utils/logger.py +24 -0
  63. replay/experimental/utils/model_handler.py +186 -0
  64. replay/experimental/utils/session_handler.py +44 -0
  65. replay/metrics/base_metric.py +11 -10
  66. replay/metrics/categorical_diversity.py +8 -8
  67. replay/metrics/coverage.py +4 -4
  68. replay/metrics/experiment.py +3 -3
  69. replay/metrics/hitrate.py +1 -3
  70. replay/metrics/map.py +1 -3
  71. replay/metrics/mrr.py +1 -3
  72. replay/metrics/ndcg.py +1 -2
  73. replay/metrics/novelty.py +3 -3
  74. replay/metrics/offline_metrics.py +16 -16
  75. replay/metrics/precision.py +1 -3
  76. replay/metrics/recall.py +1 -3
  77. replay/metrics/rocauc.py +1 -3
  78. replay/metrics/surprisal.py +4 -4
  79. replay/metrics/torch_metrics_builder.py +13 -12
  80. replay/metrics/unexpectedness.py +2 -2
  81. replay/models/als.py +2 -2
  82. replay/models/association_rules.py +4 -3
  83. replay/models/base_neighbour_rec.py +3 -2
  84. replay/models/base_rec.py +11 -10
  85. replay/models/cat_pop_rec.py +2 -1
  86. replay/models/extensions/ann/ann_mixin.py +2 -1
  87. replay/models/extensions/ann/index_builders/executor_hnswlib_index_builder.py +2 -1
  88. replay/models/extensions/ann/index_builders/executor_nmslib_index_builder.py +2 -1
  89. replay/models/lin_ucb.py +3 -3
  90. replay/models/nn/optimizer_utils/optimizer_factory.py +2 -2
  91. replay/models/nn/sequential/bert4rec/dataset.py +2 -2
  92. replay/models/nn/sequential/bert4rec/lightning.py +3 -3
  93. replay/models/nn/sequential/bert4rec/model.py +2 -2
  94. replay/models/nn/sequential/callbacks/prediction_callbacks.py +12 -12
  95. replay/models/nn/sequential/callbacks/validation_callback.py +9 -9
  96. replay/models/nn/sequential/compiled/base_compiled_model.py +5 -5
  97. replay/models/nn/sequential/postprocessors/_base.py +2 -3
  98. replay/models/nn/sequential/postprocessors/postprocessors.py +10 -10
  99. replay/models/nn/sequential/sasrec/lightning.py +3 -3
  100. replay/models/nn/sequential/sasrec/model.py +8 -8
  101. replay/models/slim.py +2 -2
  102. replay/models/ucb.py +2 -2
  103. replay/models/word2vec.py +3 -3
  104. replay/preprocessing/discretizer.py +8 -7
  105. replay/preprocessing/filters.py +4 -4
  106. replay/preprocessing/history_based_fp.py +6 -6
  107. replay/preprocessing/label_encoder.py +8 -7
  108. replay/scenarios/fallback.py +4 -3
  109. replay/splitters/base_splitter.py +3 -3
  110. replay/splitters/cold_user_random_splitter.py +4 -4
  111. replay/splitters/k_folds.py +4 -4
  112. replay/splitters/last_n_splitter.py +10 -10
  113. replay/splitters/new_users_splitter.py +4 -4
  114. replay/splitters/random_splitter.py +4 -4
  115. replay/splitters/ratio_splitter.py +10 -10
  116. replay/splitters/time_splitter.py +6 -6
  117. replay/splitters/two_stage_splitter.py +4 -4
  118. replay/utils/__init__.py +1 -0
  119. replay/utils/common.py +1 -1
  120. replay/utils/session_handler.py +2 -2
  121. replay/utils/spark_utils.py +6 -5
  122. replay/utils/types.py +3 -1
  123. {replay_rec-0.20.0.dist-info → replay_rec-0.20.0rc0.dist-info}/METADATA +17 -17
  124. replay_rec-0.20.0rc0.dist-info/RECORD +194 -0
  125. replay_rec-0.20.0.dist-info/RECORD +0 -139
  126. {replay_rec-0.20.0.dist-info → replay_rec-0.20.0rc0.dist-info}/WHEEL +0 -0
  127. {replay_rec-0.20.0.dist-info → replay_rec-0.20.0rc0.dist-info}/licenses/LICENSE +0 -0
  128. {replay_rec-0.20.0.dist-info → replay_rec-0.20.0rc0.dist-info}/licenses/NOTICE +0 -0
@@ -1,4 +1,4 @@
1
- from typing import Optional, Tuple
1
+ from typing import Optional
2
2
 
3
3
  from replay.utils import DataFrameLike, PandasDataFrame, PolarsDataFrame, SparkDataFrame
4
4
 
@@ -48,20 +48,20 @@ class RandomSplitter(Splitter):
48
48
 
49
49
  def _random_split_spark(
50
50
  self, interactions: SparkDataFrame, threshold: float
51
- ) -> Tuple[SparkDataFrame, SparkDataFrame]:
51
+ ) -> tuple[SparkDataFrame, SparkDataFrame]:
52
52
  train, test = interactions.randomSplit([1 - threshold, threshold], self.seed)
53
53
  return train, test
54
54
 
55
55
  def _random_split_pandas(
56
56
  self, interactions: PandasDataFrame, threshold: float
57
- ) -> Tuple[PandasDataFrame, PandasDataFrame]:
57
+ ) -> tuple[PandasDataFrame, PandasDataFrame]:
58
58
  train = interactions.sample(frac=(1 - threshold), random_state=self.seed)
59
59
  test = interactions.drop(train.index)
60
60
  return train, test
61
61
 
62
62
  def _random_split_polars(
63
63
  self, interactions: PolarsDataFrame, threshold: float
64
- ) -> Tuple[PolarsDataFrame, PolarsDataFrame]:
64
+ ) -> tuple[PolarsDataFrame, PolarsDataFrame]:
65
65
  train_size = int(len(interactions) * (1 - threshold)) + 1
66
66
  shuffled_interactions = interactions.sample(fraction=1, shuffle=True, seed=self.seed)
67
67
  train = shuffled_interactions[:train_size]
@@ -1,4 +1,4 @@
1
- from typing import List, Optional, Tuple
1
+ from typing import Optional
2
2
 
3
3
  import polars as pl
4
4
 
@@ -200,7 +200,7 @@ class RatioSplitter(Splitter):
200
200
 
201
201
  def _partial_split_fractions(
202
202
  self, interactions: DataFrameLike, ratio: float
203
- ) -> Tuple[DataFrameLike, DataFrameLike]:
203
+ ) -> tuple[DataFrameLike, DataFrameLike]:
204
204
  res = self._add_time_partition(interactions)
205
205
  train_size = round(1 - ratio, self._precision)
206
206
 
@@ -212,7 +212,7 @@ class RatioSplitter(Splitter):
212
212
 
213
213
  def _partial_split_fractions_pandas(
214
214
  self, interactions: PandasDataFrame, train_size: float
215
- ) -> Tuple[PandasDataFrame, PandasDataFrame]:
215
+ ) -> tuple[PandasDataFrame, PandasDataFrame]:
216
216
  interactions["count"] = interactions.groupby(self.divide_column, sort=False)[self.divide_column].transform(len)
217
217
  interactions["frac"] = (interactions["row_num"] / interactions["count"]).round(self._precision)
218
218
  if self.min_interactions_per_group is not None:
@@ -229,7 +229,7 @@ class RatioSplitter(Splitter):
229
229
 
230
230
  def _partial_split_fractions_spark(
231
231
  self, interactions: SparkDataFrame, train_size: float
232
- ) -> Tuple[SparkDataFrame, SparkDataFrame]:
232
+ ) -> tuple[SparkDataFrame, SparkDataFrame]:
233
233
  interactions = interactions.withColumn(
234
234
  "count", sf.count(self.timestamp_column).over(Window.partitionBy(self.divide_column))
235
235
  )
@@ -257,7 +257,7 @@ class RatioSplitter(Splitter):
257
257
 
258
258
  def _partial_split_fractions_polars(
259
259
  self, interactions: PolarsDataFrame, train_size: float
260
- ) -> Tuple[PolarsDataFrame, PolarsDataFrame]:
260
+ ) -> tuple[PolarsDataFrame, PolarsDataFrame]:
261
261
  interactions = interactions.with_columns(
262
262
  pl.count(self.timestamp_column).over(pl.col(self.divide_column)).alias("count")
263
263
  )
@@ -282,7 +282,7 @@ class RatioSplitter(Splitter):
282
282
 
283
283
  return train, test
284
284
 
285
- def _partial_split(self, interactions: DataFrameLike, ratio: float) -> Tuple[DataFrameLike, DataFrameLike]:
285
+ def _partial_split(self, interactions: DataFrameLike, ratio: float) -> tuple[DataFrameLike, DataFrameLike]:
286
286
  res = self._add_time_partition(interactions)
287
287
  if isinstance(res, SparkDataFrame):
288
288
  return self._partial_split_spark(res, ratio)
@@ -293,7 +293,7 @@ class RatioSplitter(Splitter):
293
293
 
294
294
  def _partial_split_pandas(
295
295
  self, interactions: PandasDataFrame, ratio: float
296
- ) -> Tuple[PandasDataFrame, PandasDataFrame]:
296
+ ) -> tuple[PandasDataFrame, PandasDataFrame]:
297
297
  interactions["count"] = interactions.groupby(self.divide_column, sort=False)[self.divide_column].transform(len)
298
298
  interactions["train_size"] = interactions["count"] - (interactions["count"] * ratio).astype(int)
299
299
  if self.min_interactions_per_group is not None:
@@ -319,7 +319,7 @@ class RatioSplitter(Splitter):
319
319
 
320
320
  return train, test
321
321
 
322
- def _partial_split_spark(self, interactions: SparkDataFrame, ratio: float) -> Tuple[SparkDataFrame, SparkDataFrame]:
322
+ def _partial_split_spark(self, interactions: SparkDataFrame, ratio: float) -> tuple[SparkDataFrame, SparkDataFrame]:
323
323
  interactions = interactions.withColumn(
324
324
  "count", sf.count(self.timestamp_column).over(Window.partitionBy(self.divide_column))
325
325
  )
@@ -352,7 +352,7 @@ class RatioSplitter(Splitter):
352
352
 
353
353
  def _partial_split_polars(
354
354
  self, interactions: PolarsDataFrame, ratio: float
355
- ) -> Tuple[PolarsDataFrame, PolarsDataFrame]:
355
+ ) -> tuple[PolarsDataFrame, PolarsDataFrame]:
356
356
  interactions = interactions.with_columns(
357
357
  pl.count(self.timestamp_column).over(self.divide_column).alias("count")
358
358
  )
@@ -385,7 +385,7 @@ class RatioSplitter(Splitter):
385
385
 
386
386
  return train, test
387
387
 
388
- def _core_split(self, interactions: DataFrameLike) -> List[DataFrameLike]:
388
+ def _core_split(self, interactions: DataFrameLike) -> list[DataFrameLike]:
389
389
  if self.split_by_fractions:
390
390
  return self._partial_split_fractions(interactions, self.test_size)
391
391
  else:
@@ -1,5 +1,5 @@
1
1
  from datetime import datetime
2
- from typing import List, Optional, Tuple, Union
2
+ from typing import Optional, Union
3
3
 
4
4
  import polars as pl
5
5
 
@@ -150,7 +150,7 @@ class TimeSplitter(Splitter):
150
150
 
151
151
  def _partial_split(
152
152
  self, interactions: DataFrameLike, threshold: Union[datetime, str, int]
153
- ) -> Tuple[DataFrameLike, DataFrameLike]:
153
+ ) -> tuple[DataFrameLike, DataFrameLike]:
154
154
  if isinstance(threshold, str):
155
155
  threshold = datetime.strptime(threshold, self.time_column_format)
156
156
 
@@ -166,7 +166,7 @@ class TimeSplitter(Splitter):
166
166
 
167
167
  def _partial_split_pandas(
168
168
  self, interactions: PandasDataFrame, threshold: Union[datetime, str, int]
169
- ) -> Tuple[PandasDataFrame, PandasDataFrame]:
169
+ ) -> tuple[PandasDataFrame, PandasDataFrame]:
170
170
  res = interactions.copy(deep=True)
171
171
  if isinstance(threshold, float):
172
172
  res.sort_values(self.timestamp_column, inplace=True)
@@ -186,7 +186,7 @@ class TimeSplitter(Splitter):
186
186
 
187
187
  def _partial_split_spark(
188
188
  self, interactions: SparkDataFrame, threshold: Union[datetime, str, int]
189
- ) -> Tuple[SparkDataFrame, SparkDataFrame]:
189
+ ) -> tuple[SparkDataFrame, SparkDataFrame]:
190
190
  if isinstance(threshold, float):
191
191
  dates = interactions.select(self.timestamp_column).withColumn(
192
192
  "_row_number_by_ts", sf.row_number().over(Window.orderBy(self.timestamp_column))
@@ -208,7 +208,7 @@ class TimeSplitter(Splitter):
208
208
 
209
209
  def _partial_split_polars(
210
210
  self, interactions: PolarsDataFrame, threshold: Union[datetime, str, int]
211
- ) -> Tuple[PolarsDataFrame, PolarsDataFrame]:
211
+ ) -> tuple[PolarsDataFrame, PolarsDataFrame]:
212
212
  if isinstance(threshold, float):
213
213
  test_start = int(len(interactions) * (1 - threshold)) + 1
214
214
 
@@ -225,5 +225,5 @@ class TimeSplitter(Splitter):
225
225
 
226
226
  return train, test
227
227
 
228
- def _core_split(self, interactions: DataFrameLike) -> List[DataFrameLike]:
228
+ def _core_split(self, interactions: DataFrameLike) -> list[DataFrameLike]:
229
229
  return self._partial_split(interactions, self.time_threshold)
@@ -2,7 +2,7 @@
2
2
  This splitter split data by two columns.
3
3
  """
4
4
 
5
- from typing import Optional, Tuple
5
+ from typing import Optional
6
6
 
7
7
  import numpy as np
8
8
  import polars as pl
@@ -165,7 +165,7 @@ class TwoStageSplitter(Splitter):
165
165
 
166
166
  return test_users
167
167
 
168
- def _split_proportion_spark(self, interactions: SparkDataFrame) -> Tuple[SparkDataFrame, SparkDataFrame]:
168
+ def _split_proportion_spark(self, interactions: SparkDataFrame) -> tuple[SparkDataFrame, SparkDataFrame]:
169
169
  counts = interactions.groupBy(self.first_divide_column).count()
170
170
  test_users = self._get_test_values(interactions).withColumn("is_test", sf.lit(True))
171
171
  if self.shuffle:
@@ -197,7 +197,7 @@ class TwoStageSplitter(Splitter):
197
197
 
198
198
  return train, test
199
199
 
200
- def _split_proportion_pandas(self, interactions: PandasDataFrame) -> Tuple[PandasDataFrame, PandasDataFrame]:
200
+ def _split_proportion_pandas(self, interactions: PandasDataFrame) -> tuple[PandasDataFrame, PandasDataFrame]:
201
201
  counts = (
202
202
  interactions.groupby(self.first_divide_column).agg(count=(self.first_divide_column, "count")).reset_index()
203
203
  )
@@ -224,7 +224,7 @@ class TwoStageSplitter(Splitter):
224
224
 
225
225
  return train, test
226
226
 
227
- def _split_proportion_polars(self, interactions: PolarsDataFrame) -> Tuple[PolarsDataFrame, PolarsDataFrame]:
227
+ def _split_proportion_polars(self, interactions: PolarsDataFrame) -> tuple[PolarsDataFrame, PolarsDataFrame]:
228
228
  counts = interactions.group_by(self.first_divide_column).count()
229
229
  test_users = self._get_test_values(interactions).with_columns(pl.lit(True).alias("is_test"))
230
230
  if self.shuffle:
replay/utils/__init__.py CHANGED
@@ -1,5 +1,6 @@
1
1
  from .types import (
2
2
  ANN_AVAILABLE,
3
+ LIGHTFM_AVAILABLE,
3
4
  OPENVINO_AVAILABLE,
4
5
  OPTUNA_AVAILABLE,
5
6
  PYSPARK_AVAILABLE,
replay/utils/common.py CHANGED
@@ -73,7 +73,7 @@ def load_from_replay(path: Union[str, Path], **kwargs) -> SavableObject:
73
73
  :param path: Path to save the object.
74
74
  """
75
75
  path = Path(path).with_suffix(".replay").resolve()
76
- with open(path / "init_args.json", "r") as file:
76
+ with open(path / "init_args.json") as file:
77
77
  class_name = json.loads(file.read())["_class_name"]
78
78
  obj_type = globals()[class_name]
79
79
  obj = obj_type.load(path, **kwargs)
@@ -6,7 +6,7 @@ import logging
6
6
  import os
7
7
  import sys
8
8
  from math import floor
9
- from typing import Any, Dict, Optional
9
+ from typing import Any, Optional
10
10
 
11
11
  import psutil
12
12
 
@@ -113,7 +113,7 @@ class Borg:
113
113
  This class allows to share objects between instances.
114
114
  """
115
115
 
116
- _shared_state: Dict[str, Any] = {}
116
+ _shared_state: dict[str, Any] = {}
117
117
 
118
118
  def __init__(self):
119
119
  self.__dict__ = self._shared_state
@@ -3,7 +3,8 @@ import logging
3
3
  import os
4
4
  import pickle
5
5
  import warnings
6
- from typing import Any, Iterable, List, Optional, Tuple, Union
6
+ from collections.abc import Iterable
7
+ from typing import Any, Optional, Union
7
8
 
8
9
  import numpy as np
9
10
  import pandas as pd
@@ -90,7 +91,7 @@ def convert2spark(data_frame: Optional[DataFrameLike]) -> Optional[SparkDataFram
90
91
  def get_top_k(
91
92
  dataframe: SparkDataFrame,
92
93
  partition_by_col: Column,
93
- order_by_col: List[Column],
94
+ order_by_col: list[Column],
94
95
  k: int,
95
96
  ) -> SparkDataFrame:
96
97
  """
@@ -393,7 +394,7 @@ def horizontal_explode(
393
394
  data_frame: SparkDataFrame,
394
395
  column_to_explode: str,
395
396
  prefix: str,
396
- other_columns: List[Column],
397
+ other_columns: list[Column],
397
398
  ) -> SparkDataFrame:
398
399
  """
399
400
  Transform a column with an array of values into separate columns.
@@ -509,7 +510,7 @@ def unpersist_if_exists(dataframe: Optional[SparkDataFrame]) -> None:
509
510
  def join_with_col_renaming(
510
511
  left: SparkDataFrame,
511
512
  right: SparkDataFrame,
512
- on_col_name: Union[str, List],
513
+ on_col_name: Union[str, list],
513
514
  how: str = "inner",
514
515
  suffix="join",
515
516
  ) -> SparkDataFrame:
@@ -698,7 +699,7 @@ def filter_cold(
698
699
  df: Optional[SparkDataFrame],
699
700
  warm_df: SparkDataFrame,
700
701
  col_name: str,
701
- ) -> Tuple[int, Optional[SparkDataFrame]]:
702
+ ) -> tuple[int, Optional[SparkDataFrame]]:
702
703
  """
703
704
  Filter out new user/item ids absent in `warm_df`.
704
705
  Return number of new users/items and filtered dataframe.
replay/utils/types.py CHANGED
@@ -1,5 +1,6 @@
1
+ from collections.abc import Iterable
1
2
  from importlib.util import find_spec
2
- from typing import Iterable, Union
3
+ from typing import Union
3
4
 
4
5
  from pandas import DataFrame as PandasDataFrame
5
6
  from polars import DataFrame as PolarsDataFrame
@@ -46,3 +47,4 @@ ANN_AVAILABLE = all(
46
47
  )
47
48
  OPENVINO_AVAILABLE = TORCH_AVAILABLE and find_spec("onnx") and find_spec("openvino")
48
49
  OPTUNA_AVAILABLE = find_spec("optuna")
50
+ LIGHTFM_AVAILABLE = find_spec("lightfm")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: replay-rec
3
- Version: 0.20.0
3
+ Version: 0.20.0rc0
4
4
  Summary: RecSys Library
5
5
  License-Expression: Apache-2.0
6
6
  License-File: LICENSE
@@ -14,29 +14,23 @@ Classifier: Intended Audience :: Developers
14
14
  Classifier: Intended Audience :: Science/Research
15
15
  Classifier: Natural Language :: English
16
16
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
17
- Provides-Extra: spark
18
- Provides-Extra: torch
19
- Provides-Extra: torch-cpu
20
- Requires-Dist: lightning (<2.6.0) ; extra == "torch" or extra == "torch-cpu"
21
- Requires-Dist: lightning ; extra == "torch"
22
- Requires-Dist: lightning ; extra == "torch-cpu"
17
+ Requires-Dist: d3rlpy (>=2.8.1,<2.9)
18
+ Requires-Dist: implicit (>=0.7.2,<0.8)
19
+ Requires-Dist: lightautoml (>=0.4.1,<0.5)
20
+ Requires-Dist: lightning (>=2.0.2,<=2.4.0)
21
+ Requires-Dist: numba (>=0.50,<1)
23
22
  Requires-Dist: numpy (>=1.20.0,<2)
24
23
  Requires-Dist: pandas (>=1.3.5,<2.4.0)
25
24
  Requires-Dist: polars (<2.0)
26
- Requires-Dist: psutil (<=7.0.0) ; extra == "spark"
27
- Requires-Dist: psutil ; extra == "spark"
25
+ Requires-Dist: psutil (<=7.0.0)
28
26
  Requires-Dist: pyarrow (<22.0)
29
- Requires-Dist: pyspark (>=3.0,<3.5) ; extra == "spark"
30
- Requires-Dist: pyspark ; extra == "spark"
31
- Requires-Dist: pytorch-optimizer (>=3.8.0,<3.9.0) ; extra == "torch" or extra == "torch-cpu"
32
- Requires-Dist: pytorch-optimizer ; extra == "torch"
33
- Requires-Dist: pytorch-optimizer ; extra == "torch-cpu"
27
+ Requires-Dist: pyspark (>=3.0,<3.5)
28
+ Requires-Dist: pytorch-optimizer (>=3.8.0,<4)
29
+ Requires-Dist: sb-obp (>=0.5.10,<0.6)
34
30
  Requires-Dist: scikit-learn (>=1.6.1,<1.7.0)
35
31
  Requires-Dist: scipy (>=1.13.1,<1.14)
36
32
  Requires-Dist: setuptools
37
- Requires-Dist: torch (>=1.8,<3.0.0) ; extra == "torch" or extra == "torch-cpu"
38
- Requires-Dist: torch ; extra == "torch"
39
- Requires-Dist: torch ; extra == "torch-cpu"
33
+ Requires-Dist: torch (>=1.8,<3.0.0)
40
34
  Requires-Dist: tqdm (>=4.67,<5)
41
35
  Project-URL: Homepage, https://sb-ai-lab.github.io/RePlay/
42
36
  Project-URL: Repository, https://github.com/sb-ai-lab/RePlay
@@ -245,6 +239,12 @@ pip install openvino onnx
245
239
  pip install hnswlib fixed-install-nmslib
246
240
  ```
247
241
 
242
+ 4) (Experimental) LightFM model support:
243
+ ```bash
244
+ pip install lightfm
245
+ ```
246
+ > **_NOTE_** : LightFM is not officially supported for Python 3.12 due to discontinued maintenance of the library. If you wish to install it locally, you'll have to use a patched fork of LightFM, such as the [one used internally](https://github.com/daviddavo/lightfm).
247
+
248
248
 
249
249
  <a name="examples"></a>
250
250
  ## 📑 Resources
@@ -0,0 +1,194 @@
1
+ replay/__init__.py,sha256=ipveEnvBxH_l-u1NHjmSA5Oug46k31WtS6WR2nYjB6s,233
2
+ replay/data/__init__.py,sha256=g5bKRyF76QL_BqlED-31RnS8pBdcyj9loMsx5vAG_0E,301
3
+ replay/data/dataset.py,sha256=yQDc8lfphQYfHpm_T1MhnG8_GyM4ONyxJoFc1rUgdJ8,30755
4
+ replay/data/dataset_utils/__init__.py,sha256=9wUvG8ZwGUvuzLU4zQI5FDcH0WVVo5YLN2ey3DterP0,55
5
+ replay/data/dataset_utils/dataset_label_encoder.py,sha256=bxuJPhShFZBok7bQZYGNMV1etCLNTJUpyKO5MIwWack,9823
6
+ replay/data/nn/__init__.py,sha256=nj2Ep-tduuQkc-TnBkvN8-rDnFbcWO2oZrfcXl9M3C8,1122
7
+ replay/data/nn/schema.py,sha256=h1KgaNV-hgN9Vpt24c92EmeMpm_8W0s9a2M0wLxJHYk,17101
8
+ replay/data/nn/sequence_tokenizer.py,sha256=qTZm2lRrOizO6RjFyn_B9WAp1yhsQqknKbgU5tmvmoI,37268
9
+ replay/data/nn/sequential_dataset.py,sha256=h7iiqSaMN-NPlEJi1QD1GSA5q-yRASg4Hp3Wv6s7fxQ,11144
10
+ replay/data/nn/torch_sequential_dataset.py,sha256=wZ0leBw4IknnbY1H6CkolXaNszhRLIyNUJfJfMrtYF0,11566
11
+ replay/data/nn/utils.py,sha256=Ic3G4yZRIzBYXLmwP1VstlZXPNR7AYGCc5EyZAERp5c,3297
12
+ replay/data/schema.py,sha256=JmYLCrNgBS5oq4O_PT724Gr1pDurHEykcqV8Xaj0XTw,15922
13
+ replay/data/spark_schema.py,sha256=4o0Kn_fjwz2-9dBY3q46F9PL0F3E7jdVpIlX7SG3OZI,1111
14
+ replay/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
+ replay/experimental/metrics/__init__.py,sha256=bdQogGbEDVAeH7Ejbb6vpw7bP6CYhftTu_DQuoFRuCA,2861
16
+ replay/experimental/metrics/base_metric.py,sha256=0ro9VoSnPtPAximnlcgmQaMNg9zoUN2AHAH_2WgfZiQ,22663
17
+ replay/experimental/metrics/coverage.py,sha256=UqYm-WtAlBFZ3kqv8PyLo4qqKiIXmR_CQFAl6H_YdqA,3150
18
+ replay/experimental/metrics/experiment.py,sha256=pD2Dyyg4PM1HjbrNrhAspZJP3B-i2So205qBChRGwwc,7337
19
+ replay/experimental/metrics/hitrate.py,sha256=TfWJrUyZXabdMr4tn8zqUPGDcYy2yphVCzXmLSHCxY0,675
20
+ replay/experimental/metrics/map.py,sha256=S4dKiMpYR0_pu0bqioGMT0kIC1s2aojFP4rddBqMPtM,921
21
+ replay/experimental/metrics/mrr.py,sha256=q6I1Cndlwr716mMuYtTMu0lN8Rrp9khxhb49OM2IpV8,530
22
+ replay/experimental/metrics/ncis_precision.py,sha256=yrErOhBZvZdNpQPx_AXyktDJatqdWRIHNMyei0QDJtQ,1088
23
+ replay/experimental/metrics/ndcg.py,sha256=q3KTsyZCrfvcpEjEnR_kWVB9ZaTFRxnoNRAr2WD0TrU,1538
24
+ replay/experimental/metrics/precision.py,sha256=U9pD9yRGeT8uH32BTyQ-W5qsAnbFWu-pqy4XfkcXfCM,664
25
+ replay/experimental/metrics/recall.py,sha256=5xRPGxfbVoDFEI5E6dVlZpT4RvnDlWzaktyoqh3a8mc,774
26
+ replay/experimental/metrics/rocauc.py,sha256=yq4vW2_bXO8HCjREBZVrHMKeZ054LYvjJmLJTXWPfQA,1675
27
+ replay/experimental/metrics/surprisal.py,sha256=CK4_zed2bSMDwC7ZBCS8d8RwGEqt8bh3w3fTpjKiK6Y,3052
28
+ replay/experimental/metrics/unexpectedness.py,sha256=JQQXEYHtQM8nqp7X2He4E9ZYwbpdENaK8oQG7sUQT3s,2621
29
+ replay/experimental/models/__init__.py,sha256=yeu0PAkqWNqNLDnUYpg0_vpkWT8tG8KmRMybodVFkZ4,1709
30
+ replay/experimental/models/admm_slim.py,sha256=dDg2c_5Lk8acykirtsv38Jg1l6kgAoBhRvPHPv5Vfis,8654
31
+ replay/experimental/models/base_neighbour_rec.py,sha256=Q2C4rle9FeVIncqgMuhLV6qZbPj2Bz8W_Ao8iQu31TU,7387
32
+ replay/experimental/models/base_rec.py,sha256=AmN6-PgIaNzD-sMIndMuRA3TJ0WZBbowCjaSTTgiYrY,54150
33
+ replay/experimental/models/base_torch_rec.py,sha256=mwbbsR-sQuQAFC1d8X2k0zP3iJeEP-X5nAaR3IV7Sqg,8105
34
+ replay/experimental/models/cql.py,sha256=ItTukqhH3V-PItVPawET9zO9tG4D8R4xKzz3tqKMjSc,19619
35
+ replay/experimental/models/ddpg.py,sha256=bzX4KvkuIecYA4bkFB1BnLKE3zqteujhpvsxAXEnKoM,32266
36
+ replay/experimental/models/dt4rec/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
37
+ replay/experimental/models/dt4rec/dt4rec.py,sha256=zcxn2MjrJg8eYqfGwfK80UjH2-uwNDg4PBbmQZz7Le0,5895
38
+ replay/experimental/models/dt4rec/gpt1.py,sha256=T3buFtYyF6Fh6sW6f9dUZFcFEnQdljItbRa22CiKb0w,14044
39
+ replay/experimental/models/dt4rec/trainer.py,sha256=YeaJ8mnoYZqnPwm1P9qOYb8GzgFC5At-JeSDcvG2V2o,3859
40
+ replay/experimental/models/dt4rec/utils.py,sha256=UF--cukjFB3uwzqaVHdCS3ik2qTtw97tzbSFGPkDfE8,8153
41
+ replay/experimental/models/extensions/spark_custom_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
42
+ replay/experimental/models/extensions/spark_custom_models/als_extension.py,sha256=R9Xj5Yew5eH3dMJ9qfIyhBg4NeMv-wXVEISgzCwFQz0,25846
43
+ replay/experimental/models/hierarchical_recommender.py,sha256=BqnEFBppKawt8Xx5lzBWk6qnmdCrZ7c2hpKj3mi1GvU,11441
44
+ replay/experimental/models/implicit_wrap.py,sha256=8F-f-CaStmlNHwphu-yu8o4Aft08NKDD_SqqH0zp1Uo,4655
45
+ replay/experimental/models/lightfm_wrap.py,sha256=rA9T2vGjrbt_GJV1XccYYsrs9qtgDtqVJCWBHFYrm4k,11329
46
+ replay/experimental/models/mult_vae.py,sha256=l-6g-2fIs80vxBl9VGY4FrJannAXrzsQOyGNuHU8tDs,11601
47
+ replay/experimental/models/neural_ts.py,sha256=oCqStgGg5CpGFAv1dC-3ODmK9nI05evzJ3XKBDQhgAo,42535
48
+ replay/experimental/models/neuromf.py,sha256=acC50kxYlctriNGqyOEkq57Iu4icUvZasyWFeRUJans,14386
49
+ replay/experimental/models/scala_als.py,sha256=6aMl8hUFR2J_nI5U8Z_-5BxfeATiWnC8zdj1C0AFbm4,10751
50
+ replay/experimental/models/u_lin_ucb.py,sha256=-gu6meOYeSwP6N8ILtwasWYj4Mbs6EJEFQXUHE8N_lY,3750
51
+ replay/experimental/nn/data/__init__.py,sha256=5EAF-FNd7xhkUpTq_5MyVcPXBD81mJCwYrcbhdGOWjE,48
52
+ replay/experimental/nn/data/schema_builder.py,sha256=nfE0-bVgYUwzyhNTTcXUWhfNBAZQLHWenM6-zEglqps,3301
53
+ replay/experimental/preprocessing/__init__.py,sha256=uMyeyQ_GKqjLhVGwhrEk3NLhhzS0DKi5xGo3VF4WkiA,130
54
+ replay/experimental/preprocessing/data_preparator.py,sha256=-yqWZT06iEYsY7rCSGRAgLcp6o7jvlsU431HspHQ2o4,35940
55
+ replay/experimental/preprocessing/padder.py,sha256=uxE6WlmYNd9kbACMEidxG1L19G5Rk0gQbvpN_TosMZ4,9558
56
+ replay/experimental/preprocessing/sequence_generator.py,sha256=vFtLkq9MuLGThPsa67103qlcMLYLfnAkR_HI1FXPwjw,9047
57
+ replay/experimental/scenarios/__init__.py,sha256=gWFLCkLyOmOppvbRMK7C3UMlMpcbIgiGVolSH6LPgWA,91
58
+ replay/experimental/scenarios/obp_wrapper/__init__.py,sha256=ZOJgpjRsmhXTpzGumk3AALKmstNBachtu_hOXUIPY5s,434
59
+ replay/experimental/scenarios/obp_wrapper/obp_optuna_objective.py,sha256=swwcot05a8GzIVhEKpfmjG16CuciItVuddPaOjCKo9o,2543
60
+ replay/experimental/scenarios/obp_wrapper/replay_offline.py,sha256=9ZP17steBiTh_KO37NnXWyN5LuPpABPhL_QG4JJHf7I,9622
61
+ replay/experimental/scenarios/obp_wrapper/utils.py,sha256=Uv_fqyJDt69vIdrw-Y9orLLzyHG0ko8svza0Hs_a87Q,3233
62
+ replay/experimental/scenarios/two_stages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
63
+ replay/experimental/scenarios/two_stages/reranker.py,sha256=NQhooA3OXLAh_PwydBNU2DGRRGPq2j2R0SSHtDM7hlg,4238
64
+ replay/experimental/scenarios/two_stages/two_stages_scenario.py,sha256=u41ymdhx0MS1I08VDjJ2UhXpSqsfTA1x9Hbz1tOaWLY,29822
65
+ replay/experimental/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
66
+ replay/experimental/utils/logger.py,sha256=UwLowaeOG17sDEe32LiZel8MnjSTzeW7J3uLG1iwLuA,639
67
+ replay/experimental/utils/model_handler.py,sha256=Rfj57E1R_XMEEigHNZa9a-rzEsyLWSDsgKfXoRzWWdg,6426
68
+ replay/experimental/utils/session_handler.py,sha256=H0C-Q2pqrs_5aDvoAkRMZuS5qu07uhu6g5FEL3NJiic,1305
69
+ replay/metrics/__init__.py,sha256=j0PGvUehaPEZMNo9SQwJsnvzrS4bam9eHrRMQFLnMjY,2813
70
+ replay/metrics/base_metric.py,sha256=ejtwFHktN4J8Fi1HIM3w0zlMAd8nO7-XpFi2D1iHXUQ,16010
71
+ replay/metrics/categorical_diversity.py,sha256=3tp8n457Ob4gjM-UTB5N19u9WAF7fLDkWKk-Mth-Vzc,10769
72
+ replay/metrics/coverage.py,sha256=e6vPItrRlI-mLNuOT5uoo5lMAAzkYGKZRxvupi21dMk,8528
73
+ replay/metrics/descriptors.py,sha256=BHORyGKfJgPeUjgLO0u2urSTe16UQbb-HHh8soqnwDE,3893
74
+ replay/metrics/experiment.py,sha256=6Sw8PyItn3E2R-BBa_YwrmtBV3n0uAGHHOvkhHYgMz4,8125
75
+ replay/metrics/hitrate.py,sha256=LcOJLMs3_Dq4_pbKx95qdCdjGrX52dyWyuWUFXCyaDw,2314
76
+ replay/metrics/map.py,sha256=dIZcmUxd2XnNC7d_d7gmq0cjNaI1hlNMaJTSHGCokQE,2572
77
+ replay/metrics/mrr.py,sha256=qM8tVMSoyYR-kTx0mnBGppoC53SxNlZKm7JKMUmSv9U,2163
78
+ replay/metrics/ndcg.py,sha256=izajmD243ZIK3KLm9M-NtLwxb9N3Ktj58__AAfwF6Vc,3110
79
+ replay/metrics/novelty.py,sha256=j3p1fbUVi2QQgEre42jeQx73PYYDUhy5gYlrL4BL5b8,5488
80
+ replay/metrics/offline_metrics.py,sha256=f_U4Tk3Ke5sR0_OYvoE2_nD6wrOCveg3DM3B9pStVUI,20454
81
+ replay/metrics/precision.py,sha256=DRlsgY_b4bJCOSZjCA58N41REMiDt-dbagRSXxfXyvY,2256
82
+ replay/metrics/recall.py,sha256=fzpASDiH88zcpXJZTbStQ3nuzzSdhd9k1wjF27rM4wc,2447
83
+ replay/metrics/rocauc.py,sha256=1vaVEK7DQTL8BX-i7A64hTFWyO38aNycscPGrdWKwbA,3282
84
+ replay/metrics/surprisal.py,sha256=HkmYrOuw3jydxFrkidjdcpAcKz2DeOnMsKqwB2g9pwY,7526
85
+ replay/metrics/torch_metrics_builder.py,sha256=jccdTNXJrwiWmBoD9cB3ilIn-upKhR6toAfKTG5T2Mc,13855
86
+ replay/metrics/unexpectedness.py,sha256=LSi-z50l3_yrvLnmToHQzm6Ygf2QpNt_zhk6jdg7QUo,6882
87
+ replay/models/__init__.py,sha256=kECYluQZ83zRUWaHVvnt7Tg3BerHrJy9v8XfRxsqyYY,1123
88
+ replay/models/als.py,sha256=1MFAbcx64tv0MX1wE9CM1NxKD3F3ZDhZUrmt6dvHu74,6220
89
+ replay/models/association_rules.py,sha256=shBNsKjlii0YK-XA6bSl5Ov0ZFTnjxZbgKJU9PFYptY,14507
90
+ replay/models/base_neighbour_rec.py,sha256=SdGb2ejpYjHmxFNTk5zwEo0RWdfPAj1vKGP_oj7IrQo,7783
91
+ replay/models/base_rec.py,sha256=aNIEbSy8G5q92NOpDlSJbp0Z-lAkazFLa9eDAajl1wI,56067
92
+ replay/models/cat_pop_rec.py,sha256=ed1X1PDQY41hFJ1cO3Q5OWy0rXhV5_n23hJ-QHWONtE,11968
93
+ replay/models/cluster.py,sha256=9JcpGnbfgFa4UsyxPAa4WMuJFa3rsuAxiKoy-s_UfyE,4970
94
+ replay/models/common.py,sha256=rFmfwwzWCWED2HaDVuSN7ZUAgaNPGPawUudgn4IApbo,2121
95
+ replay/models/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
96
+ replay/models/extensions/ann/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
97
+ replay/models/extensions/ann/ann_mixin.py,sha256=Ua1fuwrvtISNDQ8iPV-ln8S1LDKz8-rIU2UYsMExAiU,7782
98
+ replay/models/extensions/ann/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
99
+ replay/models/extensions/ann/entities/base_hnsw_param.py,sha256=5GRdcQj4-zhNXfJ7ko2WHGHgRuXCzSHCRcRxljl1V4c,776
100
+ replay/models/extensions/ann/entities/hnswlib_param.py,sha256=j3V4JXM_yfR6s2TjYakIXMg-zS1-MrP6an930DEIWGM,2104
101
+ replay/models/extensions/ann/entities/nmslib_hnsw_param.py,sha256=WeEhRR4jKqgvWK_zDK8fx6kEqc2e_bc0kubvqK3iV8c,2162
102
+ replay/models/extensions/ann/index_builders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
103
+ replay/models/extensions/ann/index_builders/base_index_builder.py,sha256=Ul25G0FaNLOXUjrDXxZDTg7tLXlv1N6wR8kWjWICtZ0,2110
104
+ replay/models/extensions/ann/index_builders/driver_hnswlib_index_builder.py,sha256=U8-3lRahyWmWkZ7tYuO-Avd1jX-lGh7JukC140wJ-WQ,1600
105
+ replay/models/extensions/ann/index_builders/driver_nmslib_index_builder.py,sha256=1NLWyAJGYgp46uUBhUYQyd0stmG6DhLh7U4JEne5TFw,1308
106
+ replay/models/extensions/ann/index_builders/executor_hnswlib_index_builder.py,sha256=cf3LhBCRRN-lBYGlJbv8vnY-KVeHAleN5cVjvd58Ibs,2476
107
+ replay/models/extensions/ann/index_builders/executor_nmslib_index_builder.py,sha256=0DPJ3WAt0cZ5dmtZv87fmMEgYXWf8rM35f7CA_DgWZY,2618
108
+ replay/models/extensions/ann/index_builders/nmslib_index_builder_mixin.py,sha256=AIkVnobesnTM5lrBSWf9gd0CySwFQ0vH_DjemfLS4Cs,1925
109
+ replay/models/extensions/ann/index_inferers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
110
+ replay/models/extensions/ann/index_inferers/base_inferer.py,sha256=I39aqEc2somfndrCd-KC3XYZnYSrJ2hGpR9y6wO93NA,2524
111
+ replay/models/extensions/ann/index_inferers/hnswlib_filter_index_inferer.py,sha256=JjT4l_XAjzUOsTAE7OS88zAgPd_h_O44oUnn2kVr8E0,2477
112
+ replay/models/extensions/ann/index_inferers/hnswlib_index_inferer.py,sha256=CoY_oMfdcwnh87ceuSpHXu4Czle9xxeMisO8XJUuJLE,1717
113
+ replay/models/extensions/ann/index_inferers/nmslib_filter_index_inferer.py,sha256=tjuqbkztWBU4K6qp5LPFU_GOGJf2f4oXneExtUEVUzw,3128
114
+ replay/models/extensions/ann/index_inferers/nmslib_index_inferer.py,sha256=S5eCBZlTXxEAeX6yeZGC7j56gOcJ7lMNb4Cs_5PEj9E,2203
115
+ replay/models/extensions/ann/index_inferers/utils.py,sha256=6IST2FPSY3nuYu5KqzRpd4FgdaV3GnQRQlxp9LN_yyA,641
116
+ replay/models/extensions/ann/index_stores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
117
+ replay/models/extensions/ann/index_stores/base_index_store.py,sha256=u4l2ybAXX92ZMGK7NqqosbKF75QgFqhAMVadd5ePj6Y,910
118
+ replay/models/extensions/ann/index_stores/hdfs_index_store.py,sha256=0zDq9PdDOiD6HvtZlfjTbuJHfeTOWBTQ_HiuqZmoxtA,3090
119
+ replay/models/extensions/ann/index_stores/shared_disk_index_store.py,sha256=thl4T1uYU4Gtk4nBao_qK8CbFBdX1xmXNishxgfCd-I,2030
120
+ replay/models/extensions/ann/index_stores/spark_files_index_store.py,sha256=QP_8mE7EIBbePIe0AB-IWuJLRA5MR3wswCEt8oHzn-0,3617
121
+ replay/models/extensions/ann/index_stores/utils.py,sha256=6r2GP_EFCaCguolW857pb4lRS8rh6_Nv_Edso9_j5no,3756
122
+ replay/models/extensions/ann/utils.py,sha256=AgQvThi_DvEtakQeTno9hVZVWiWMFHKTjRcQ2wLa5vk,1222
123
+ replay/models/kl_ucb.py,sha256=L6vC2KsTBTTx4ckmGhWybOiLa5Wt54N7cgl7jS2FQRg,6731
124
+ replay/models/knn.py,sha256=HEiGHHQg9pV1_EIWZHfK-XD0BNAm1bj1c0ND9rYnj3k,8992
125
+ replay/models/lin_ucb.py,sha256=J9h4vtJE9JpitzJIUXLuVWpo9Pdixf2ZrXET7J62nIg,16909
126
+ replay/models/nn/__init__.py,sha256=AT3o1qXaxUq4_QIGlcGuSs54ZpueOo-SbpZwuGI-6os,41
127
+ replay/models/nn/loss/__init__.py,sha256=s3iO9QTZvLz_ony2b5K0hEmDmitrXQnAe9j6BRxLpR4,53
128
+ replay/models/nn/loss/sce.py,sha256=p6LFtoYSY4j2pQh6Z7i6cEADCmRnvTgnb8EJXseRKKg,5637
129
+ replay/models/nn/optimizer_utils/__init__.py,sha256=8MHln7CW54oACVUFKdZLjAf4bY83GcOMXpmL87gTnhI,178
130
+ replay/models/nn/optimizer_utils/optimizer_factory.py,sha256=1wicKnya2xrwDaHhqygy1VqB8-3jPDhMM7zY2TJE4dY,2844
131
+ replay/models/nn/sequential/__init__.py,sha256=CI2n0cxs_amqJrwBMq6n0Z_uBOu7CGXfagqvE4Jlmjw,128
132
+ replay/models/nn/sequential/bert4rec/__init__.py,sha256=JfZqHOGxcvOkICl5cWmZbZhaKXpkIvua-Wj57VWWEhw,399
133
+ replay/models/nn/sequential/bert4rec/dataset.py,sha256=Bfa9sw0qzf5lJ7_3t1AgTfZO3yWFHHrSz7ViDOSOipQ,10993
134
+ replay/models/nn/sequential/bert4rec/lightning.py,sha256=_hP6_6E1SpGu6b_kiYEF4ZVhwKJ4sj_iPTo6loIvM0o,26546
135
+ replay/models/nn/sequential/bert4rec/model.py,sha256=2Lqvfz7UBB_ArqNs92OD5dy4a1onR4S5dNZiMbZgAgk,17388
136
+ replay/models/nn/sequential/callbacks/__init__.py,sha256=Q7mSZ_RB6iyD7QZaBL_NJ0uh8cRfgxq7gtPHbkSyhoo,282
137
+ replay/models/nn/sequential/callbacks/prediction_callbacks.py,sha256=lkYoTOpyt-Gy9wOgOgGlFshTJP24VsDcWm_6pY5Xmyg,9296
138
+ replay/models/nn/sequential/callbacks/validation_callback.py,sha256=GcRWM_yVwRBRLTW1sYCy9_aNZ5C71hmJeGG61Yyh4vA,5812
139
+ replay/models/nn/sequential/compiled/__init__.py,sha256=eSVcCaUH5cDJQRbC7K99X7uMNR-Z-KR4TmYOGKWWJCI,531
140
+ replay/models/nn/sequential/compiled/base_compiled_model.py,sha256=vOL-9jodvSNc7N32V4lTjRTCNM-tOPAxfMyZPENqsFA,10231
141
+ replay/models/nn/sequential/compiled/bert4rec_compiled.py,sha256=Z6nfmdT70Wi-j7_CDFJ88iNCp1gdQleg1WkfHp0hb4s,6400
142
+ replay/models/nn/sequential/compiled/sasrec_compiled.py,sha256=qUaAwQOsBCstOG3RBlj_pJpD8BHmCpLZWCiPBlFVvT4,5856
143
+ replay/models/nn/sequential/postprocessors/__init__.py,sha256=89LGzkNHukcuC2-rfpiz7vmv1zyk6MNY-8zaXrvtn0M,164
144
+ replay/models/nn/sequential/postprocessors/_base.py,sha256=Q_SIYKG8G3U03IEK1dtlW1zJI300pOcWQYuMpkY0_nc,1111
145
+ replay/models/nn/sequential/postprocessors/postprocessors.py,sha256=hXPtAJzfpUqs5kshbFDNmw3Kbva4QignYHF-Suf6sY4,7802
146
+ replay/models/nn/sequential/sasrec/__init__.py,sha256=c6130lRpPkcbuGgkM7slagBIgH7Uk5zUtSzFDEwAsik,250
147
+ replay/models/nn/sequential/sasrec/dataset.py,sha256=2QpWsXnyZiODSuCesQp3GALA4GsSkwSQQnkTyygdOnU,7768
148
+ replay/models/nn/sequential/sasrec/lightning.py,sha256=oScUyB8RU8N4MqWe6kAoWG0JW6Tkb2ldG_jdGFZgA7A,25060
149
+ replay/models/nn/sequential/sasrec/model.py,sha256=8kFovyPWqgQ0hmD3gckRjW7-hLBerl3bgYXCk4PYn0o,27656
150
+ replay/models/optimization/__init__.py,sha256=N8xCuzu0jQGwHrIBjuTRf-ZcZuBJ6FB0d9C5a7izJQU,338
151
+ replay/models/optimization/optuna_mixin.py,sha256=pKu-Vw9l2LsDycubpdJiLkC1eE4pKrDG0T2lhUgRUB4,11960
152
+ replay/models/optimization/optuna_objective.py,sha256=UHWOJwBngPA3IRz9yAMEWPg00oyb7Wq9PXuRPYHIiLE,7538
153
+ replay/models/pop_rec.py,sha256=Ju9y2rU2vW_jFU9-W15fbbr5_ZzYGihSjSxsqKsAf0Q,4964
154
+ replay/models/query_pop_rec.py,sha256=UNsHtf3eQpJom73ZmEO5us4guI4SnCLJYTfuUpRgqes,4086
155
+ replay/models/random_rec.py,sha256=9SC012_X3sNzrAjDG1CPGhjisZb6gnv4VCW7yIMSNpk,8066
156
+ replay/models/slim.py,sha256=OAdTS64bObZujzHkq8vfP1kkoLMSWxk1KLg6lCCA0N8,4551
157
+ replay/models/thompson_sampling.py,sha256=gcjlVl1mPiEVt70y8frA762O-eCZzd3SVg1lnDRCEHk,1939
158
+ replay/models/ucb.py,sha256=b2qFgvOAZcyv5triPk18duqF_jt-ty7mypenjRLNWwQ,6952
159
+ replay/models/wilson.py,sha256=o7aUWjq3648dAfgGBoWD5Gu-HzdyobPMaH2lzCLijiA,4558
160
+ replay/models/word2vec.py,sha256=atfj6GjR_L-TdurRFr1yi7B3BicJ3ZdFxixW9RfojJs,8882
161
+ replay/preprocessing/__init__.py,sha256=c6wFPAc6lATyp0lE-ZDjHMsXyEMPKX7Usuqylv6H5XQ,597
162
+ replay/preprocessing/converter.py,sha256=JQ-4u5x0eXtswl1iH-bZITBXQov1nebnZ6XcvpD8Twk,4417
163
+ replay/preprocessing/discretizer.py,sha256=jzYqvoSVmiL-oS-ri9Om0vSDoU8bCQimjUoe7FiPfLU,27024
164
+ replay/preprocessing/filters.py,sha256=C0zR4LOnGJsMzowuWfaTPR457RppgLZRhcZFV1WkS7o,45845
165
+ replay/preprocessing/history_based_fp.py,sha256=oEu1CkCz7xcGbPdSTHfhTe1NimnFo50Arn8qngRBgE8,18702
166
+ replay/preprocessing/label_encoder.py,sha256=eWsPa5mZq7_9SDxkaiI8mpCfIKTKNr-tlNmfqEunnTk,41432
167
+ replay/preprocessing/sessionizer.py,sha256=G6i0K3FwqtweRxvcSYraJ-tBWAT2HnV-bWHHlIZJF-s,12217
168
+ replay/scenarios/__init__.py,sha256=XXAKEQPTLlve-0O6NPwFgahFrb4oGcIq3HaYaaGxG2E,94
169
+ replay/scenarios/fallback.py,sha256=dO3s9jqYup4rbgMaY6Z6HGm1r7SXkm7jOvNZDr5zm_U,7138
170
+ replay/splitters/__init__.py,sha256=DnqVMelrzLwR8fGQgcWN_8FipGs8T4XGSPOMW-L_x2g,454
171
+ replay/splitters/base_splitter.py,sha256=zvYVEHBYrK8Y2qPv3kYachfLFwR9-kUAiU1UJSNGS8A,7749
172
+ replay/splitters/cold_user_random_splitter.py,sha256=32VgAHiwk9Emkofu1KqwGZrrFiyrYtSQ3YPdt5p_XoQ,4423
173
+ replay/splitters/k_folds.py,sha256=RDDL3gE6M5qfK5Ig-pxxJeq3O4uxsWJjLFQRRzQ2Ssg,6211
174
+ replay/splitters/last_n_splitter.py,sha256=hMWIGYFg17LioT08VBXut5Ic-w9oXsKd739cy2xuwYs,15368
175
+ replay/splitters/new_users_splitter.py,sha256=NksAdl_wL9zwHj3cY5NqrrnkOajgyUDloSsRZ9HUE48,9160
176
+ replay/splitters/random_splitter.py,sha256=0DO0qulT0jp_GXswmFh3BMJ7utS-z9e-r5jIrmTKGC4,2989
177
+ replay/splitters/ratio_splitter.py,sha256=rFWN-nKBYx1qKrmtYzjYf08DWFiKOCo5ZRUz-NHJFfs,17506
178
+ replay/splitters/time_splitter.py,sha256=0ZAMK26b--1wjrfzCuNVBh7gMPTa8SGf4LMEgACiUxA,9013
179
+ replay/splitters/two_stage_splitter.py,sha256=8Zn6BTJmZg04CD4l2jmil2dEu6xtglJaSS5mkotIXRc,17823
180
+ replay/utils/__init__.py,sha256=N5ScuX9qeIk-D2Xrt0GEFiO-VWCPiEVyD3ZVByLtvf4,385
181
+ replay/utils/common.py,sha256=92MTG51WpeEQJ2gu-WvdNe4Fmqm8ze-y1VNIAHW81jQ,5358
182
+ replay/utils/dataframe_bucketizer.py,sha256=LipmBBQkdkLGroZpbP9i7qvTombLdMxo2dUUys1m5OY,3748
183
+ replay/utils/distributions.py,sha256=UuhaC9HI6HnUXW97fEd-TsyDk4JT8t7k1T_6l5FpOMs,1203
184
+ replay/utils/model_handler.py,sha256=6WRyd39B-UXTtKTHWD_ssYN1vMmkjd417bwKb50uqJY,5754
185
+ replay/utils/session_handler.py,sha256=fQo2wseow8yuzKnEXT-aYAXcQIgRbTTXp0v7g1VVi0w,5138
186
+ replay/utils/spark_utils.py,sha256=GbRp-MuUoO3Pc4chFvlmo9FskSlRLeNlC3Go5pEJ6Ok,27411
187
+ replay/utils/time.py,sha256=J8asoQBytPcNw-BLGADYIsKeWhIoN1H5hKiX9t2AMqo,9376
188
+ replay/utils/types.py,sha256=rD9q9CqEXgF4yy512Hv2nXclvwcnfodOnhBZ1HSUI4c,1260
189
+ replay/utils/warnings.py,sha256=ByAEhT1jKvq02llCtAXM-ioutLsBZjtiqIBesoVi4_Y,929
190
+ replay_rec-0.20.0rc0.dist-info/METADATA,sha256=0Y3iIt67qHGsc3NsjFn1J-SM0EPANWdbhiqv0GDxPyw,13155
191
+ replay_rec-0.20.0rc0.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
192
+ replay_rec-0.20.0rc0.dist-info/licenses/LICENSE,sha256=rPmcA7UrHxBChEAAlJyE24qUWKKl9yLQXxFsKeg_LX4,11344
193
+ replay_rec-0.20.0rc0.dist-info/licenses/NOTICE,sha256=k0bo4KHiHLRax5K3XKTTrf2Fi8V91mJ-R3FMdh6Reg0,2002
194
+ replay_rec-0.20.0rc0.dist-info/RECORD,,