nextrec 0.4.11__py3-none-any.whl → 0.4.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,275 @@
1
+ """
2
+ Date: create on 19/12/2025
3
+ Checkpoint: edit on 19/12/2025
4
+ Author: Yang Zhou, zyaztec@gmail.com
5
+ Reference:
6
+ [1] Juan Y, Zhuang Y, Chin W-S, et al. Field-aware Factorization Machines for CTR
7
+ Prediction[C]//RecSys. 2016: 43-50.
8
+
9
+ Field-aware Factorization Machines (FFM) extend FM by learning a distinct
10
+ embedding of each feature for every target field. For a pair of fields (i, j),
11
+ FFM uses v_{i,f_j} · v_{j,f_i} instead of a shared embedding, enabling richer
12
+ context-aware interactions and stronger CTR performance on sparse categorical
13
+ data.
14
+
15
+ Pipeline:
16
+ (1) Build field-aware embeddings v_{i,f} for each feature i toward every field f
17
+ (2) Compute first-order linear terms for sparse/sequence (and optional dense) fields
18
+ (3) For each field pair (i, j), compute v_{i,f_j} · v_{j,f_i}
19
+ (4) Sum linear + field-aware interaction logits and output prediction
20
+
21
+ Key Advantages:
22
+ - Field-aware embeddings capture asymmetric interactions between fields
23
+ - Improves CTR accuracy on sparse categorical features
24
+ - Retains interpretable second-order structure
25
+
26
+ FFM 在 FM 基础上引入字段感知机制:每个特征在不同目标字段下拥有不同的 embedding。
27
+ 对于字段对 (i, j),模型使用 v_{i,f_j} 与 v_{j,f_i} 的内积,从而更细粒度地建模
28
+ 跨字段交互,在稀疏高维 CTR 场景中表现更优。
29
+
30
+ 处理流程:
31
+ (1) 为每个特征 i 构造面向每个字段 f 的嵌入 v_{i,f}
32
+ (2) 计算一阶线性项(稀疏/序列特征,及可选的稠密特征)
33
+ (3) 对每一对字段 (i, j) 计算 v_{i,f_j} · v_{j,f_i}
34
+ (4) 将线性项与交互项相加得到最终预测
35
+
36
+ 主要优点:
37
+ - 字段感知嵌入可捕捉非对称交互
38
+ - 稀疏类别特征下预测更准确
39
+ - 保持二阶结构的可解释性
40
+ """
41
+
42
+ import torch
43
+ import torch.nn as nn
44
+
45
+ from nextrec.basic.features import DenseFeature, SequenceFeature, SparseFeature
46
+ from nextrec.basic.layers import AveragePooling, InputMask, PredictionLayer, SumPooling
47
+ from nextrec.basic.model import BaseModel
48
+ from nextrec.utils.torch_utils import get_initializer
49
+
50
+
51
class FFM(BaseModel):
    """Field-aware Factorization Machine (Juan et al., RecSys 2016).

    For every feature i and every *other* target field f, FFM learns a
    dedicated embedding v_{i,f}. A field pair (i, j) is scored with the inner
    product v_{i,f_j} . v_{j,f_i}; the final logit is the sum of a first-order
    linear term (sparse/sequence and optional dense features) and all pairwise
    field-aware interaction terms.
    """

    @property
    def model_name(self):
        # Identifier used by the framework (logging, checkpoints).
        return "FFM"

    @property
    def default_task(self):
        # CTR prediction defaults to binary classification.
        return "binary"

    def __init__(
        self,
        dense_features: list[DenseFeature] | None = None,
        sparse_features: list[SparseFeature] | None = None,
        sequence_features: list[SequenceFeature] | None = None,
        target: list[str] | str | None = None,
        task: str | list[str] | None = None,
        optimizer: str = "adam",
        optimizer_params: dict | None = None,
        loss: str | nn.Module | None = "bce",
        loss_params: dict | list[dict] | None = None,
        device: str = "cpu",
        embedding_l1_reg=1e-6,
        dense_l1_reg=1e-5,
        embedding_l2_reg=1e-5,
        dense_l2_reg=1e-4,
        **kwargs,
    ):
        """Build the FFM model.

        Args:
            dense_features: Optional dense features; used only in the linear term.
            sparse_features: Categorical features taking part in interactions.
            sequence_features: Sequence features (combiner 'mean' or 'sum' only).
            target: Label column name(s).
            task: Task type; falls back to ``default_task`` when None.
            optimizer, optimizer_params, loss, loss_params: Training config
                forwarded to ``compile``.
            device: Torch device string.
            embedding_l1_reg, dense_l1_reg, embedding_l2_reg, dense_l2_reg:
                Regularization strengths forwarded to ``BaseModel``.

        Raises:
            ValueError: If fewer than two sparse/sequence features are given,
                embedding dims differ across features, or a sequence feature
                uses combiner='concat'.
        """
        dense_features = dense_features or []
        sparse_features = sparse_features or []
        sequence_features = sequence_features or []
        optimizer_params = optimizer_params or {}
        if loss is None:
            loss = "bce"

        super(FFM, self).__init__(
            dense_features=dense_features,
            sparse_features=sparse_features,
            sequence_features=sequence_features,
            target=target,
            task=task or self.default_task,
            device=device,
            embedding_l1_reg=embedding_l1_reg,
            dense_l1_reg=dense_l1_reg,
            embedding_l2_reg=embedding_l2_reg,
            dense_l2_reg=dense_l2_reg,
            **kwargs,
        )

        self.fm_features = sparse_features + sequence_features
        if len(self.fm_features) < 2:
            raise ValueError(
                "FFM requires at least two sparse/sequence features to build field-aware interactions."
            )

        # All field-aware embeddings must share one dim so dot products are valid.
        self.embedding_dim = self.fm_features[0].embedding_dim
        if any(f.embedding_dim != self.embedding_dim for f in self.fm_features):
            raise ValueError(
                "All FFM features must share the same embedding_dim for field-aware interactions."
            )
        for feature in self.fm_features:
            if isinstance(feature, SequenceFeature) and feature.combiner == "concat":
                raise ValueError(
                    "FFM does not support SequenceFeature with combiner='concat' because it breaks shared embedding_dim."
                )

        # One embedding table per (source feature, target field) pair.
        # Fix: skip the diagonal (a feature toward its own field). forward()
        # only scores distinct field pairs, so diagonal tables were dead
        # parameters that still consumed memory and received regularization.
        self.field_aware_embeddings = nn.ModuleDict()
        for src_feature in self.fm_features:
            for target_field in self.fm_features:
                if src_feature is target_field:
                    continue
                key = self.field_aware_key(src_feature, target_field)
                if key in self.field_aware_embeddings:
                    continue
                self.field_aware_embeddings[key] = self.build_embedding(src_feature)

        # First-order terms for sparse/sequence features: one scalar weight per
        # vocab entry, shared across features with the same embedding_name.
        self.first_order_embeddings = nn.ModuleDict()
        for feature in self.fm_features:
            if feature.embedding_name in self.first_order_embeddings:
                continue
            emb = nn.Embedding(
                num_embeddings=feature.vocab_size,
                embedding_dim=1,
                padding_idx=feature.padding_idx,
            )
            self.first_order_embeddings[feature.embedding_name] = emb

        # Optional dense linear term (only built when dense inputs exist).
        self.dense_features = list(dense_features)
        dense_input_dim = sum(f.input_dim for f in self.dense_features)
        self.linear_dense = (
            nn.Linear(dense_input_dim, 1, bias=True) if dense_input_dim > 0 else None
        )

        self.prediction_layer = PredictionLayer(task_type=self.task)
        self.input_mask = InputMask()
        self.mean_pool = AveragePooling()
        self.sum_pool = SumPooling()

        # Register embedding weights so BaseModel applies embedding-specific
        # regularization to them.
        self.embedding_params.extend(
            emb.weight for emb in self.field_aware_embeddings.values()
        )
        self.embedding_params.extend(
            emb.weight for emb in self.first_order_embeddings.values()
        )
        self.register_regularization_weights(
            embedding_attr="field_aware_embeddings", include_modules=["linear_dense"]
        )

        self.compile(
            optimizer=optimizer,
            optimizer_params=optimizer_params,
            loss=loss,
            loss_params=loss_params,
        )

    def field_aware_key(
        self, src_feature: SparseFeature | SequenceFeature, target_field
    ) -> str:
        """Return the ModuleDict key for src_feature's embedding toward target_field."""
        return f"{src_feature.embedding_name}__to__{target_field.name}"

    def build_embedding(self, feature: SparseFeature | SequenceFeature) -> nn.Embedding:
        """Create one embedding table for ``feature``, honoring pretrained weights.

        Fix: the original re-checked ``weight is None`` inside a branch that
        already guaranteed it was not None; that unreachable check is removed.
        """
        weight = getattr(feature, "pretrained_weight", None)
        if weight is not None:
            if weight.shape != (feature.vocab_size, feature.embedding_dim):
                raise ValueError(
                    f"[FFM Error]: Pretrained weight for '{feature.embedding_name}' has shape {weight.shape}, expected ({feature.vocab_size}, {feature.embedding_dim})."
                )
            embedding = nn.Embedding.from_pretrained(
                embeddings=weight,
                freeze=feature.freeze_pretrained,
                padding_idx=feature.padding_idx,
            )
            # Frozen pretrained weights must never receive gradients.
            embedding.weight.requires_grad = (
                feature.trainable and not feature.freeze_pretrained
            )
        else:
            embedding = nn.Embedding(
                num_embeddings=feature.vocab_size,
                embedding_dim=feature.embedding_dim,
                padding_idx=feature.padding_idx,
            )
            embedding.weight.requires_grad = feature.trainable
            initialization = get_initializer(
                init_type=feature.init_type,
                activation="linear",
                param=feature.init_params,
            )
            initialization(embedding.weight)
        return embedding

    def embed_for_field(
        self,
        feature: SparseFeature | SequenceFeature,
        target_field,
        x: dict[str, torch.Tensor],
    ) -> torch.Tensor:
        """Embed ``feature`` with its field-aware table toward ``target_field``.

        Returns a [B, D] tensor; sequence features are pooled by their
        configured combiner ('mean' or 'sum').
        """
        key = self.field_aware_key(feature, target_field)
        emb = self.field_aware_embeddings[key]
        if isinstance(feature, SparseFeature):
            return emb(x[feature.name].long())

        seq_input = x[feature.name].long()
        # Keep only the most recent max_len steps of the sequence.
        if feature.max_len is not None and seq_input.size(1) > feature.max_len:
            seq_input = seq_input[:, -feature.max_len :]
        seq_emb = emb(seq_input)  # [B, L, D]
        mask = self.input_mask(x, feature, seq_input)
        if feature.combiner == "mean":
            return self.mean_pool(seq_emb, mask)
        if feature.combiner == "sum":
            return self.sum_pool(seq_emb, mask)
        raise ValueError(
            f"[FFM Error]: Unsupported combiner '{feature.combiner}' for sequence feature '{feature.name}'."
        )

    def forward(self, x):
        """Compute predictions: linear term + field-aware pairwise interactions."""
        batch_size = x[self.fm_features[0].name].size(0)
        device = x[self.fm_features[0].name].device
        y_linear = torch.zeros(batch_size, 1, device=device)

        # First-order dense part.
        if self.linear_dense is not None:
            dense_inputs = [
                x[f.name].float().view(batch_size, -1) for f in self.dense_features
            ]
            dense_stack = torch.cat(dense_inputs, dim=1) if dense_inputs else None
            if dense_stack is not None:
                y_linear = y_linear + self.linear_dense(dense_stack)

        # First-order sparse/sequence part.
        first_order_terms = []
        for feature in self.fm_features:
            emb = self.first_order_embeddings[feature.embedding_name]
            if isinstance(feature, SparseFeature):
                term = emb(x[feature.name].long())  # [B, 1]
            else:
                seq_input = x[feature.name].long()
                if feature.max_len is not None and seq_input.size(1) > feature.max_len:
                    seq_input = seq_input[:, -feature.max_len :]
                mask = self.input_mask(x, feature, seq_input).squeeze(1)  # [B, L]
                seq_weight = emb(seq_input).squeeze(-1)  # [B, L]
                # Masked sum over sequence positions gives one scalar per row.
                term = (seq_weight * mask).sum(dim=1, keepdim=True)  # [B, 1]
            first_order_terms.append(term)
        if first_order_terms:
            y_linear = y_linear + torch.sum(
                torch.stack(first_order_terms, dim=1), dim=1
            )

        # Field-aware second-order interactions over distinct field pairs:
        # score(i, j) = <v_{i,f_j}, v_{j,f_i}>.
        y_interaction = torch.zeros(batch_size, 1, device=device)
        num_fields = len(self.fm_features)
        for i in range(num_fields - 1):
            feature_i = self.fm_features[i]
            for j in range(i + 1, num_fields):
                feature_j = self.fm_features[j]
                v_i_fj = self.embed_for_field(feature_i, feature_j, x)
                v_j_fi = self.embed_for_field(feature_j, feature_i, x)
                y_interaction = y_interaction + torch.sum(
                    v_i_fj * v_j_fi, dim=1, keepdim=True
                )

        y = y_linear + y_interaction
        return self.prediction_layer(y)
@@ -113,8 +113,6 @@ class LR(BaseModel):
113
113
  )
114
114
 
115
115
def forward(self, x):
    """Logistic-regression forward pass.

    Embeds every feature into a single flat vector, applies the linear
    layer, and maps the logit through the task prediction head.
    """
    flat_input = self.embedding(x=x, features=self.all_features, squeeze_dim=True)
    logits = self.linear(flat_input)
    return self.prediction_layer(logits)
nextrec/utils/__init__.py CHANGED
@@ -36,7 +36,7 @@ from .data import (
36
36
  )
37
37
  from .embedding import get_auto_embedding_dim
38
38
  from .feature import normalize_to_list
39
- from .model import get_mlp_output_dim, merge_features
39
+ from .model import compute_pair_scores, get_mlp_output_dim, merge_features
40
40
  from .torch_utils import (
41
41
  add_distributed_sampler,
42
42
  concat_tensors,
@@ -88,6 +88,7 @@ __all__ = [
88
88
  # Model utilities
89
89
  "merge_features",
90
90
  "get_mlp_output_dim",
91
+ "compute_pair_scores",
91
92
  # Feature utilities
92
93
  "normalize_to_list",
93
94
  # Config utilities
nextrec/utils/console.py CHANGED
@@ -4,7 +4,7 @@ Console and CLI utilities for NextRec.
4
4
  This module centralizes CLI logging helpers, progress display, and metric tables.
5
5
 
6
6
  Date: create on 19/12/2025
7
- Checkpoint: edit on 19/12/2025
7
+ Checkpoint: edit on 20/12/2025
8
8
  Author: Yang Zhou, zyaztec@gmail.com
9
9
  """
10
10
 
@@ -242,6 +242,14 @@ def display_metrics_table(
242
242
  normalized_order.append(name)
243
243
  task_order = normalized_order
244
244
 
245
+ if not task_order and not grouped and not metrics:
246
+ if isinstance(loss, numbers.Number):
247
+ msg = f"Epoch {epoch}/{epochs} - {split} (loss={float(loss):.4f})"
248
+ if colorize is not None:
249
+ msg = colorize(msg)
250
+ logging.info(msg)
251
+ return
252
+
245
253
  if Console is None or Table is None or box is None:
246
254
  prefix = f"Epoch {epoch}/{epochs} - {split}:"
247
255
  segments: list[str] = []
nextrec/utils/model.py CHANGED
@@ -7,6 +7,8 @@ Author: Yang Zhou, zyaztec@gmail.com
7
7
 
8
8
  from collections import OrderedDict
9
9
 
10
+ import torch
11
+
10
12
 
11
13
  def merge_features(primary, secondary) -> list:
12
14
  merged: OrderedDict[str, object] = OrderedDict()
@@ -42,3 +44,15 @@ def select_features(
42
44
  )
43
45
 
44
46
  return [feature_map[name] for name in names]
47
+
48
+
49
def compute_pair_scores(model, data, batch_size: int = 512):
    """Score user/item pairs with a two-tower model.

    Encodes users and items through the model, moves both embeddings to the
    model's device, computes their similarity without tracking gradients, and
    applies a sigmoid when the model was trained pointwise. Returns the
    scores as a NumPy array.
    """
    encoded_users = model.encode_user(data, batch_size=batch_size)
    encoded_items = model.encode_item(data, batch_size=batch_size)
    with torch.no_grad():
        users = torch.as_tensor(encoded_users, device=model.device)
        items = torch.as_tensor(encoded_items, device=model.device)
        pair_scores = model.compute_similarity(users, items)
        if model.training_mode == "pointwise":
            pair_scores = torch.sigmoid(pair_scores)
    return pair_scores.detach().cpu().numpy()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: nextrec
3
- Version: 0.4.11
3
+ Version: 0.4.13
4
4
  Summary: A comprehensive recommendation library with match, ranking, and multi-task learning models
5
5
  Project-URL: Homepage, https://github.com/zerolovesea/NextRec
6
6
  Project-URL: Repository, https://github.com/zerolovesea/NextRec
@@ -66,7 +66,7 @@ Description-Content-Type: text/markdown
66
66
  ![Python](https://img.shields.io/badge/Python-3.10+-blue.svg)
67
67
  ![PyTorch](https://img.shields.io/badge/PyTorch-1.10+-ee4c2c.svg)
68
68
  ![License](https://img.shields.io/badge/License-Apache%202.0-green.svg)
69
- ![Version](https://img.shields.io/badge/Version-0.4.11-orange.svg)
69
+ ![Version](https://img.shields.io/badge/Version-0.4.13-orange.svg)
70
70
 
71
71
  中文文档 | [English Version](README_en.md)
72
72
 
@@ -99,7 +99,7 @@ NextRec是一个基于PyTorch的现代推荐系统框架,旨在为研究工程
99
99
 
100
100
  ## NextRec近期进展
101
101
 
102
- - **12/12/2025** 在v0.4.11中加入了[RQ-VAE](/nextrec/models/representation/rqvae.py)模块。配套的[数据集](/dataset/ecommerce_task.csv)和[代码](tutorials/notebooks/zh/使用RQ-VAE构建语义ID.ipynb)已经同步在仓库中
102
+ - **12/12/2025** 在v0.4.11中加入了[RQ-VAE](/nextrec/models/representation/rqvae.py)模块。配套的[数据集](/dataset/ecommerce_task.csv)和[代码](tutorials/notebooks/zh/使用RQ-VAE构建语义ID.ipynb)已经同步在仓库中
103
103
  - **07/12/2025** 发布了NextRec CLI命令行工具,它允许用户根据配置文件进行一键训练和推理,我们提供了相关的[教程](/nextrec_cli_preset/NextRec-CLI_zh.md)和[教学代码](/nextrec_cli_preset)
104
104
  - **03/12/2025** NextRec获得了100颗🌟!感谢大家的支持
105
105
  - **06/12/2025** 在v0.4.1中支持了单机多卡的分布式DDP训练,并且提供了配套的[代码](tutorials/distributed)
@@ -128,7 +128,7 @@ pip install nextrec # or pip install -e .
128
128
  - [movielen_ranking_deepfm.py](/tutorials/movielen_ranking_deepfm.py) - movielen 100k数据集上的 DeepFM 模型训练示例
129
129
  - [example_ranking_din.py](/tutorials/example_ranking_din.py) - 电商数据集上的DIN 深度兴趣网络训练示例
130
130
  - [example_multitask.py](/tutorials/example_multitask.py) - 电商数据集上的ESMM多任务学习训练示例
131
- - [movielen_match_dssm.py](/tutorials/example_match_dssm.py) - 基于movielen 100k数据集训练的 DSSM 召回模型示例
131
+ - [movielen_match_dssm.py](/tutorials/movielen_match_dssm.py) - 基于movielen 100k数据集训练的 DSSM 召回模型示例
132
132
 
133
133
  - [example_distributed_training.py](/tutorials/distributed/example_distributed_training.py) - 使用NextRec进行单机多卡训练的代码示例
134
134
 
@@ -240,11 +240,11 @@ nextrec --mode=train --train_config=path/to/train_config.yaml
240
240
  nextrec --mode=predict --predict_config=path/to/predict_config.yaml
241
241
  ```
242
242
 
243
- > 截止当前版本0.4.11,NextRec CLI支持单机训练,分布式训练相关功能尚在开发中。
243
+ > 截止当前版本0.4.13,NextRec CLI支持单机训练,分布式训练相关功能尚在开发中。
244
244
 
245
245
  ## 兼容平台
246
246
 
247
- 当前最新版本为0.4.11,所有模型和测试代码均已在以下平台通过验证,如果开发者在使用中遇到兼容问题,请在issue区提出错误报告及系统版本:
247
+ 当前最新版本为0.4.13,所有模型和测试代码均已在以下平台通过验证,如果开发者在使用中遇到兼容问题,请在issue区提出错误报告及系统版本:
248
248
 
249
249
  | 平台 | 配置 |
250
250
  |------|------|
@@ -262,7 +262,7 @@ nextrec --mode=predict --predict_config=path/to/predict_config.yaml
262
262
  | [FM](nextrec/models/ranking/fm.py) | Factorization Machines | ICDM 2010 | 已支持 |
263
263
  | [LR](nextrec/models/ranking/lr.py) | Logistic Regression | - | 已支持 |
264
264
  | [AFM](nextrec/models/ranking/afm.py) | Attentional Factorization Machines: Learning the Weight of Feature Interactions via Attention Networks | IJCAI 2017 | 已支持 |
265
- | [FFM](nextrec/models/ranking/ffm.py) | Field-aware Factorization Machines | RecSys 2010 | 开发中 |
265
+ | [FFM](nextrec/models/ranking/ffm.py) | Field-aware Factorization Machines | RecSys 2016 | 已支持 |
266
266
  | [DeepFM](nextrec/models/ranking/deepfm.py) | DeepFM: A Factorization-Machine based Neural Network for CTR Prediction | IJCAI 2017 | 已支持 |
267
267
  | [Wide&Deep](nextrec/models/ranking/widedeep.py) | Wide & Deep Learning for Recommender Systems | DLRS 2016 | 已支持 |
268
268
  | [xDeepFM](nextrec/models/ranking/xdeepfm.py) | xDeepFM: Combining Explicit and Implicit Feature Interactions | KDD 2018 | 已支持 |
@@ -1,34 +1,35 @@
1
1
  nextrec/__init__.py,sha256=_M3oUqyuvQ5k8Th_3wId6hQ_caclh7M5ad51XN09m98,235
2
- nextrec/__version__.py,sha256=xIphSmmFF5C8ZjsK5bpruTtbjrTL9bI6TjdjgsELGCw,23
3
- nextrec/cli.py,sha256=PXRNXMRm_a_1u6StnjsHefq0rKqsc6Mzx3mZmc9553g,23803
2
+ nextrec/__version__.py,sha256=ARFl7G-gCe12exBb-FIsJnbsUD5V9okxkHUUdQqb0RA,23
3
+ nextrec/cli.py,sha256=6nBY8O8-0931h428eQS8CALkKn1FmizovJme7Q1c_O0,23978
4
4
  nextrec/basic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
5
  nextrec/basic/activation.py,sha256=uzTWfCOtBSkbu_Gk9XBNTj8__s241CaYLJk6l8nGX9I,2885
6
- nextrec/basic/callback.py,sha256=a6gg7r3x1v0xaOSya9PteLql7I14nepY7gX8tDYtins,14679
7
- nextrec/basic/features.py,sha256=Wnbzr7UMotgv1Vzeg0o9Po-KKIvYUSYIghoVDfMPx_g,4340
8
- nextrec/basic/layers.py,sha256=GJH2Tx3IkZrYGb7-ET976iHCC28Ubck_NO9-iyY4mDI,28911
9
- nextrec/basic/loggers.py,sha256=JnQiFvmsVgZ63gqBLR2ZFWrVPzkxRbzWhTdeoiJKcos,6526
10
- nextrec/basic/metrics.py,sha256=8RswR_3MGvIBkT_n6fnmON2eYH-hfD7kIKVnyJJjL3o,23131
11
- nextrec/basic/model.py,sha256=OCcV9nTAZukurRISzPGCQM5yJ0Fpph3vOMKb2CPkI68,98685
6
+ nextrec/basic/callback.py,sha256=nn1f8FG9c52vJ-gvwteqPbk3-1QuNS1vmhBlkENdb0I,14636
7
+ nextrec/basic/features.py,sha256=GyCUzGPuizUofrZSSOdqHK84YhnX4MGTdu7Cx2OGhUA,4654
8
+ nextrec/basic/layers.py,sha256=ZM3Nka3e2cit3e3peL0ukJCMgKZK1ovNFfAWvVOwlos,28556
9
+ nextrec/basic/loggers.py,sha256=Zh1A5DVAFqlGglyaQ4_IMgvFbWAcXX5H3aHbCWA82nE,6524
10
+ nextrec/basic/metrics.py,sha256=saNgM7kuHk9xqDxZF6x33irTaxeXCU-hxYTUQauuGgg,23074
11
+ nextrec/basic/model.py,sha256=b_O81WSv1XxBAS5oQk92DlLdYAtnikr_epaV5T9RSxs,102570
12
12
  nextrec/basic/session.py,sha256=UOG_-EgCOxvqZwCkiEd8sgNV2G1sm_HbzKYVQw8yYDI,4483
13
13
  nextrec/data/__init__.py,sha256=YZQjpty1pDCM7q_YNmiA2sa5kbujUw26ObLHWjMPjKY,1194
14
14
  nextrec/data/batch_utils.py,sha256=0bYGVX7RlhnHv_ZBaUngjDIpBNw-igCk98DgOsF7T6o,2879
15
15
  nextrec/data/data_processing.py,sha256=lKXDBszrO5fJMAQetgSPr2mSQuzOluuz1eHV4jp0TDU,5538
16
16
  nextrec/data/data_utils.py,sha256=0Ls1cnG9lBz0ovtyedw5vwp7WegGK_iF-F8e_3DEddo,880
17
17
  nextrec/data/dataloader.py,sha256=xTORNbaQVa20sk2S3kyV0SSngscvq8bNqHr0AmYjFqM,18768
18
- nextrec/data/preprocessor.py,sha256=wNjivq2N-iDzBropkp3YfSkN0jSA4l4h81C-ECa6k4c,44643
19
- nextrec/loss/__init__.py,sha256=-sibZK8QXLblVNWqdqjrPPzMCDyIXSq7yd2eZ57p9Nw,810
18
+ nextrec/data/preprocessor.py,sha256=K-cUP-YdlQx1VJ2m1CXuprncpjDJe2ERVO5xCSoxHKI,44470
19
+ nextrec/loss/__init__.py,sha256=ZCgsfyR5YAecv6MdOsnUjkfacvZg2coQVjuKAfPvmRo,923
20
+ nextrec/loss/grad_norm.py,sha256=91Grspx95Xu_639TkL_WZRX1xt5QOTZCzBeJWbUGPiE,8385
20
21
  nextrec/loss/listwise.py,sha256=UT9vJCOTOQLogVwaeTV7Z5uxIYnngGdxk-p9e97MGkU,5744
21
- nextrec/loss/loss_utils.py,sha256=Eg_EKm47onSCLhgs2q7IkB7TV9TwV1Dz4QgVR2yh-gc,4610
22
+ nextrec/loss/loss_utils.py,sha256=xMmT_tWcKah_xcU3FzVMmSEzyZfxiMKZWUbwkAspcDg,4579
22
23
  nextrec/loss/pairwise.py,sha256=X9yg-8pcPt2IWU0AiUhWAt3_4W_3wIF0uSdDYTdoPFY,3398
23
24
  nextrec/loss/pointwise.py,sha256=o9J3OznY0hlbDsUXqn3k-BBzYiuUH5dopz8QBFqS_kQ,7343
24
25
  nextrec/models/generative/__init__.py,sha256=0MV3P-_ainPaTxmRBGWKUVCEt14KJvuvEHmRB3OQ1Fs,176
25
26
  nextrec/models/generative/tiger.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
27
  nextrec/models/multi_task/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
27
- nextrec/models/multi_task/esmm.py,sha256=3QQePhSkOcM4t472S4E5xU9_KiLiSwHb9CfdkEgmqqk,6491
28
- nextrec/models/multi_task/mmoe.py,sha256=uFTbc0MiFBDTCIt8mTW6xs0oyOn1EesIHHZo81HR35k,8583
29
- nextrec/models/multi_task/ple.py,sha256=z32etizNlTLwwR7CYKxy8u9owAbtiRh492Fje_y64hQ,13016
30
- nextrec/models/multi_task/poso.py,sha256=foH7XDUz0XN0s0zoyHLuTmrcs3QOT8-x4YGxLX1Lxxg,19016
31
- nextrec/models/multi_task/share_bottom.py,sha256=rmEnsX3LA3pNsLKfG1ir5WDLdkSY-imO_ASiclirJiA,6519
28
+ nextrec/models/multi_task/esmm.py,sha256=AqesBZ4tOFNm7POCrHZ90h1zWWSViZAYfydUVOh2dEU,6545
29
+ nextrec/models/multi_task/mmoe.py,sha256=aaQKcx4PL_mAanW3tkjAR886KmMCHTdBuu4p9EIKQJo,8657
30
+ nextrec/models/multi_task/ple.py,sha256=fqkujPFGxxQOO_6nBZEz_UcxLEUoX_vCJsk0YOpxTg4,13084
31
+ nextrec/models/multi_task/poso.py,sha256=J_Btxhm9JpFJMdQQHNNf9mMRHOgO7j1ts6VN5o4qJnk,19193
32
+ nextrec/models/multi_task/share_bottom.py,sha256=DTWm6fpLCLiXimD-qk_0YIKT_9THMFDrnx4GDViXc_g,6583
32
33
  nextrec/models/ranking/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
33
34
  nextrec/models/ranking/afm.py,sha256=96jGUPL4yTWobMIVBjHpOxl9AtAzCAGR8yw7Sy2JmdQ,10125
34
35
  nextrec/models/ranking/autoint.py,sha256=S6Cxnp1q2OErSYqmIix5P-b4qLWR-0dY6TMStuU6WLg,8109
@@ -37,11 +38,11 @@ nextrec/models/ranking/dcn_v2.py,sha256=QnqQbJsrtQp4mtvnBXFUVefKyr4dw-gHNWrCbO26
37
38
  nextrec/models/ranking/deepfm.py,sha256=aXoK59e2KaaPe5vfyFW4YiHbX4E2iG3gxFCxmWo8RHk,5200
38
39
  nextrec/models/ranking/dien.py,sha256=c7Zs85vxhOgKHg5s0QcSLCn1xXCCSD177TMERgM_v8g,18958
39
40
  nextrec/models/ranking/din.py,sha256=gdUhuKiKXBNOALbK8fGhlbSeuDT8agcEdNSrC_wveHc,9422
40
- nextrec/models/ranking/eulernet.py,sha256=mZTrD8rKbGbWMEeWpTl8mVimytLFJTLM5-LS_I3U6cw,13115
41
- nextrec/models/ranking/ffm.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
41
+ nextrec/models/ranking/eulernet.py,sha256=SQr7M_6GI1u09jpxzRasQLFAPLXcmqff69waER6fiD8,12201
42
+ nextrec/models/ranking/ffm.py,sha256=9t685SViSU1J0ESz-lrYSXhf4d4BWLNYZXReeVEk3e8,11262
42
43
  nextrec/models/ranking/fibinet.py,sha256=_eroddVHooJcaGT8MqS4mUrtv5j4pnTmfI3FoAKOZhs,7919
43
44
  nextrec/models/ranking/fm.py,sha256=SsrSKK3y4xg5Lv-t3JLnZan55Hzze2AxAiVPuscy0bk,4536
44
- nextrec/models/ranking/lr.py,sha256=Qf8RozgWlsKjHGVbo-94d2Z_4kMfCXHmvwkYu3WVZjQ,4030
45
+ nextrec/models/ranking/lr.py,sha256=MUonlKyA77_bfshTupFjOhY5tiuSJxApFM-_yOk4Nwk,4008
45
46
  nextrec/models/ranking/masknet.py,sha256=tY1y2lO0iq82oylPN0SBnL5Bikc8weinFXpURyVT1hE,12373
46
47
  nextrec/models/ranking/pnn.py,sha256=FcNIFAw5J0ORGSR6L8ZK7NeXlJPpojwe_SpsxMQqCFw,8174
47
48
  nextrec/models/ranking/widedeep.py,sha256=-ghKfe_0puvlI9fBQr8lK3gXkfVvslGwP40AJTGqc7w,5077
@@ -62,16 +63,16 @@ nextrec/models/retrieval/sdm.py,sha256=LhkCZSfGhxOxziEkUtjr_hnqcyciJ2qpMoBSFBVW9
62
63
  nextrec/models/retrieval/youtube_dnn.py,sha256=xtGPV6_5LeSZBKkrTaU1CmtxlhgYLvZmjpwYaXYIaEA,7403
63
64
  nextrec/models/sequential/hstu.py,sha256=P2Kl7HEL3afwiCApGKQ6UbUNO9eNXXrB10H7iiF8cI0,19735
64
65
  nextrec/models/sequential/sasrec.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
65
- nextrec/utils/__init__.py,sha256=5ss2XQq8QZ2Ko5eiQ7oIig5cIZNrYGIptaarYEeO7Fk,2550
66
+ nextrec/utils/__init__.py,sha256=C-1l-suSsN_MlPlj_5LApyCRQLOao5l7bO0SccwKHw4,2598
66
67
  nextrec/utils/config.py,sha256=0HOeMyTlx8g6BZVpXzo2lEOkb-mzNwhbigQuUomsYnY,19934
67
- nextrec/utils/console.py,sha256=D2Vax9_b7bgvAAOyk-Q2oUhSk1B-OngY5buS9Gb9-I0,11398
68
+ nextrec/utils/console.py,sha256=e94SiwA0gKn2pfpP94mY_jl-kFok3TCjxo298KdFuP4,11696
68
69
  nextrec/utils/data.py,sha256=alruiWZFbmwy3kO12q42VXmtHmXFFjVULpHa43fx_mI,21098
69
70
  nextrec/utils/embedding.py,sha256=akAEc062MG2cD7VIOllHaqtwzAirQR2gq5iW7oKpGAU,1449
70
71
  nextrec/utils/feature.py,sha256=rsUAv3ELyDpehVw8nPEEsLCCIjuKGTJJZuFaWB_wrPk,633
71
- nextrec/utils/model.py,sha256=dYl1XfIZt6aVjNyV2AAhcArwFRMcEAKrjG_pr8AVHs0,1163
72
+ nextrec/utils/model.py,sha256=3B85a0IJCggI26dxv25IX8R_5yQPo7wXI0JIAns6bkQ,1727
72
73
  nextrec/utils/torch_utils.py,sha256=AKfYbSOJjEw874xsDB5IO3Ote4X7vnqzt_E0jJny0o8,13468
73
- nextrec-0.4.11.dist-info/METADATA,sha256=QlIiSVfAdB1MxGkkgshFLPkc1KbvhCTj2NfXe4lR71M,20957
74
- nextrec-0.4.11.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
75
- nextrec-0.4.11.dist-info/entry_points.txt,sha256=NN-dNSdfMRTv86bNXM7d3ZEPW2BQC6bRi7QP7i9cIps,45
76
- nextrec-0.4.11.dist-info/licenses/LICENSE,sha256=2fQfVKeafywkni7MYHyClC6RGGC3laLTXCNBx-ubtp0,1064
77
- nextrec-0.4.11.dist-info/RECORD,,
74
+ nextrec-0.4.13.dist-info/METADATA,sha256=BcBFpd0l4OdNRlXtG5R1UT-jMcAdloQJjOAG33E4KRE,20958
75
+ nextrec-0.4.13.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
76
+ nextrec-0.4.13.dist-info/entry_points.txt,sha256=NN-dNSdfMRTv86bNXM7d3ZEPW2BQC6bRi7QP7i9cIps,45
77
+ nextrec-0.4.13.dist-info/licenses/LICENSE,sha256=2fQfVKeafywkni7MYHyClC6RGGC3laLTXCNBx-ubtp0,1064
78
+ nextrec-0.4.13.dist-info/RECORD,,