nextrec-0.4.3-py3-none-any.whl → nextrec-0.4.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nextrec/__version__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.4.3"
+ __version__ = "0.4.5"
nextrec/models/ranking/xdeepfm.py CHANGED
@@ -1,12 +1,54 @@
  """
  Date: create on 09/11/2025
  Author:
- Yang Zhou,zyaztec@gmail.com
+ Yang Zhou,zyaztec@gmail.com
  Reference:
- [1] Lian J, Zhou X, Zhang F, et al. xdeepfm: Combining explicit and implicit feature interactions
- for recommender systems[C]//Proceedings of the 24th ACM SIGKDD international conference on
- knowledge discovery & data mining. 2018: 1754-1763.
- (https://arxiv.org/abs/1803.05170)
+ [1] Lian J, Zhou X, Zhang F, et al. xdeepfm: Combining explicit and implicit feature interactions
+ for recommender systems[C]//Proceedings of the 24th ACM SIGKDD international conference on
+ knowledge discovery & data mining. 2018: 1754-1763.
+ (https://arxiv.org/abs/1803.05170)
+
+ xDeepFM is a CTR prediction model that unifies explicit and implicit
+ feature interaction learning. It extends DeepFM by adding the
+ Compressed Interaction Network (CIN) to explicitly model high-order
+ interactions at the vector-wise level, while an MLP captures implicit
+ non-linear crosses. A linear term retains first-order signals, and all
+ three parts are learned jointly end-to-end.
+
+ In the forward pass:
+ (1) Embedding Layer: transforms sparse/sequence fields into dense vectors
+ (2) Linear Part: captures first-order contributions of sparse/sequence fields
+ (3) CIN: explicitly builds higher-order feature crosses via convolution over
+ outer products of field embeddings, with optional split-half connections
+ (4) Deep Part (MLP): models implicit, non-linear interactions across all fields
+ (5) Combination: sums outputs from linear, CIN, and deep branches before the
+ task-specific prediction layer
+
+ Key Advantages:
+ - Jointly learns first-order, explicit high-order, and implicit interactions
+ - CIN offers interpretable vector-wise crosses with controlled complexity
+ - Deep branch enhances representation power for non-linear patterns
+ - End-to-end optimization eliminates heavy manual feature engineering
+ - Flexible design supports both sparse and sequence features
+
+ xDeepFM 是一个 CTR 预估模型,将显式与隐式的特征交互学习统一到同一框架。
+ 在 DeepFM 的基础上,额外引入了 CIN(Compressed Interaction Network)
+ 显式建模高阶向量级交互,同时 MLP 负责隐式非线性交互,线性部分保留一阶信号,
+ 三者联合训练。
+
+ 前向流程:
+ (1) 嵌入层:将稀疏/序列特征映射为稠密向量
+ (2) 线性部分:建模稀疏/序列特征的一阶贡献
+ (3) CIN:通过对字段嵌入做外积并卷积,显式捕获高阶交叉,可选 split-half 以控参
+ (4) 深层部分(MLP):对所有特征进行隐式非线性交互建模
+ (5) 融合:线性、CIN、MLP 输出求和后进入任务预测层
+
+ 主要优点:
+ - 同时学习一阶、显式高阶、隐式交互
+ - CIN 提供可解释的向量级交叉并可控复杂度
+ - 深层分支提升非线性表达能力
+ - 端到端训练降低人工特征工程需求
+ - 兼容稀疏与序列特征的建模
  """
 
  import torch
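For readers skimming the new docstring: step (3), the CIN, is what distinguishes xDeepFM from DeepFM. Below is a minimal, self-contained PyTorch sketch of a CIN layer for illustration only — the layer sizes, plain sum-pooling, and omission of split-half connections are assumptions, and this is not the code shipped in nextrec/models/ranking/xdeepfm.py.

```python
# Minimal CIN sketch (illustrative only; not the nextrec implementation).
import torch
import torch.nn as nn


class SimpleCIN(nn.Module):
    def __init__(self, num_fields: int, layer_sizes=(16, 16)):
        super().__init__()
        self.convs = nn.ModuleList()
        prev = num_fields
        for size in layer_sizes:
            # Each layer compresses the outer product of x0 and the previous
            # layer's feature maps with a kernel-size-1 convolution.
            self.convs.append(nn.Conv1d(num_fields * prev, size, kernel_size=1))
            prev = size
        self.output_dim = sum(layer_sizes)

    def forward(self, x0: torch.Tensor) -> torch.Tensor:
        # x0: (batch, num_fields, emb_dim) field embeddings.
        batch, m, d = x0.shape
        xk = x0
        pooled = []
        for conv in self.convs:
            # Vector-wise outer product: (batch, m, h_k, emb_dim).
            z = torch.einsum("bmd,bhd->bmhd", x0, xk)
            z = z.reshape(batch, m * xk.shape[1], d)
            xk = torch.relu(conv(z))           # (batch, h_{k+1}, emb_dim)
            pooled.append(xk.sum(dim=-1))      # sum-pool over the embedding dim
        return torch.cat(pooled, dim=1)        # (batch, sum(layer_sizes))


# Example: 10 fields with 8-dim embeddings.
cin = SimpleCIN(num_fields=10)
out = cin(torch.randn(4, 10, 8))
print(out.shape)  # torch.Size([4, 32])
```

Each layer crosses the original field embeddings with the previous layer's feature maps, so the k-th layer captures (k+1)-order vector-wise interactions; the pooled outputs are what get summed with the linear and deep branches.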
nextrec/utils/config.py CHANGED
@@ -160,8 +160,11 @@ def build_feature_objects(
  SparseFeature(
  name=name,
  vocab_size=int(vocab_size),
+ embedding_name=embed_cfg.get("embedding_name", name),
  embedding_dim=embed_cfg.get("embedding_dim"),
  padding_idx=embed_cfg.get("padding_idx"),
+ init_type=embed_cfg.get("init_type", "xavier_uniform"),
+ init_params=embed_cfg.get("init_params"),
  l1_reg=embed_cfg.get("l1_reg", 0.0),
  l2_reg=embed_cfg.get("l2_reg", 1e-5),
  trainable=embed_cfg.get("trainable", True),
@@ -184,9 +187,12 @@ def build_feature_objects(
  name=name,
  vocab_size=int(vocab_size),
  max_len=embed_cfg.get("max_len") or proc_cfg.get("max_len", 50),
+ embedding_name=embed_cfg.get("embedding_name", name),
  embedding_dim=embed_cfg.get("embedding_dim"),
  padding_idx=embed_cfg.get("padding_idx"),
  combiner=embed_cfg.get("combiner", "mean"),
+ init_type=embed_cfg.get("init_type", "xavier_uniform"),
+ init_params=embed_cfg.get("init_params"),
  l1_reg=embed_cfg.get("l1_reg", 0.0),
  l2_reg=embed_cfg.get("l2_reg", 1e-5),
  trainable=embed_cfg.get("trainable", True),
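Both hunks forward three new keys from each feature's embedding config: `embedding_name` (defaulting to the feature name, which suggests several features can point at one shared table), `init_type`, and `init_params`. A hypothetical config entry exercising them might look like the sketch below; only the key names and defaults visible in the diff are grounded, while the values and surrounding structure are illustrative rather than the documented nextrec config schema.

```python
# Hypothetical embed_cfg dict as consumed by build_feature_objects (illustrative).
embed_cfg = {
    "embedding_dim": 16,
    "padding_idx": 0,
    "embedding_name": "item_id",      # new key in 0.4.5: explicit embedding-table name
    "init_type": "xavier_uniform",    # new key in 0.4.5: embedding initializer type
    "init_params": {"gain": 1.0},     # new key in 0.4.5: extra initializer arguments
    "l1_reg": 0.0,
    "l2_reg": 1e-5,
    "trainable": True,
}
```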
nextrec/utils/initializer.py CHANGED
@@ -5,10 +5,9 @@ Date: create on 13/11/2025
  Author: Yang Zhou, zyaztec@gmail.com
  """
 
- from typing import Any, Dict, Set, cast
+ from typing import Any, Dict, Set
 
  import torch.nn as nn
- from torch.nn.init import _NonlinearityType
 
  KNOWN_NONLINEARITIES: Set[str] = {
  "linear",
@@ -27,28 +26,25 @@ KNOWN_NONLINEARITIES: Set[str] = {
  }
 
 
- def resolve_nonlinearity(activation: str | _NonlinearityType) -> _NonlinearityType:
- if isinstance(activation, str):
- if activation in KNOWN_NONLINEARITIES:
- return cast(_NonlinearityType, activation)
- # Fall back to linear for custom activations (gain handled separately).
- return "linear"
- return activation
+ def resolve_nonlinearity(activation: str):
+ if activation in KNOWN_NONLINEARITIES:
+ return activation
+ return "linear"
 
 
- def resolve_gain(activation: str | _NonlinearityType, param: Dict[str, Any]) -> float:
+ def resolve_gain(activation: str, param: Dict[str, Any]) -> float:
  if "gain" in param:
  return param["gain"]
  nonlinearity = resolve_nonlinearity(activation)
  try:
- return nn.init.calculate_gain(nonlinearity, param.get("param"))
+ return nn.init.calculate_gain(nonlinearity, param.get("param")) # type: ignore
  except ValueError:
- return 1.0 # custom activation with no gain estimate available
+ return 1.0
 
 
  def get_initializer(
  init_type: str = "normal",
- activation: str | _NonlinearityType = "linear",
+ activation: str = "linear",
  param: Dict[str, Any] | None = None,
  ):
  param = param or {}
@@ -62,11 +58,11 @@ def get_initializer(
  nn.init.xavier_normal_(tensor, gain=gain)
  elif init_type == "kaiming_uniform":
  nn.init.kaiming_uniform_(
- tensor, a=param.get("a", 0), nonlinearity=nonlinearity
+ tensor, a=param.get("a", 0), nonlinearity=nonlinearity # type: ignore
  )
  elif init_type == "kaiming_normal":
  nn.init.kaiming_normal_(
- tensor, a=param.get("a", 0), nonlinearity=nonlinearity
+ tensor, a=param.get("a", 0), nonlinearity=nonlinearity # type: ignore
  )
  elif init_type == "orthogonal":
  nn.init.orthogonal_(tensor, gain=gain)
@@ -80,4 +76,4 @@ def get_initializer(
  raise ValueError(f"Unknown init_type: {init_type}")
  return tensor
 
- return initializer_fn
+ return initializer_fn
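A short usage sketch of the factory above, inferred only from the signature and body visible in this diff (the import path follows the RECORD listing below, and the exact shape of `initializer_fn` is an assumption); the last lines illustrate the `torch.nn.init.calculate_gain` behaviour that `resolve_nonlinearity` and `resolve_gain` guard against.

```python
import torch
import torch.nn as nn

# Import path per the RECORD listing; usage is illustrative, not official docs.
from nextrec.utils.initializer import get_initializer

weight = torch.empty(64, 16)

# get_initializer returns a function that initializes a tensor in place.
init_fn = get_initializer(init_type="kaiming_uniform", activation="relu", param={"a": 0})
init_fn(weight)

# calculate_gain only knows a fixed set of nonlinearity names and raises
# ValueError otherwise, which is why unknown activations fall back to
# "linear" / gain 1.0 in the helpers above.
print(nn.init.calculate_gain("relu"))   # sqrt(2) ≈ 1.414
try:
    nn.init.calculate_gain("dice")      # e.g. a custom activation name
except ValueError:
    print("unknown nonlinearity -> resolve_gain returns 1.0")
```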
{nextrec-0.4.3.dist-info → nextrec-0.4.5.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nextrec
- Version: 0.4.3
+ Version: 0.4.5
  Summary: A comprehensive recommendation library with match, ranking, and multi-task learning models
  Project-URL: Homepage, https://github.com/zerolovesea/NextRec
  Project-URL: Repository, https://github.com/zerolovesea/NextRec
@@ -63,7 +63,7 @@ Description-Content-Type: text/markdown
  ![Python](https://img.shields.io/badge/Python-3.10+-blue.svg)
  ![PyTorch](https://img.shields.io/badge/PyTorch-1.10+-ee4c2c.svg)
  ![License](https://img.shields.io/badge/License-Apache%202.0-green.svg)
- ![Version](https://img.shields.io/badge/Version-0.4.3-orange.svg)
+ ![Version](https://img.shields.io/badge/Version-0.4.5-orange.svg)
 
  English | [中文文档](README_zh.md)
 
@@ -84,60 +84,65 @@ English | [中文文档](README_zh.md)
 
  ## Introduction
 
- NextRec is a modern recommendation system framework built on PyTorch, providing researchers and engineering teams with a fast modeling, training, and evaluation experience. The framework adopts a modular design with rich built-in model implementations, data processing tools, and engineering-ready training components, covering various recommendation scenarios. NextRec provides easy-to-use interfaces, command-line tools, and tutorials, enabling recommendation algorithm learners to quickly understand model architectures and train and infer models at the fastest speed.
+ NextRec is a modern recommendation framework built on PyTorch, delivering a unified experience for modeling, training, and evaluation. Design with rich model implementations, data-processing utilities, and engineering-ready training components. NextRec focuses on large-scale industrial recommendation scenarios on Spark clusters, training on massive offline features(`parquet/csv`).
 
  ## Why NextRec
 
- **Unified feature engineering & data pipeline**: NextRec provides Dense/Sparse/Sequence feature definitions, persistent DataProcessor, and batch-optimized RecDataLoader, matching the model training and inference process based on offline `parquet/csv` features in industrial big-data Spark/Hive scenarios.
- **Multi-scenario recommendation capabilities**: Covers ranking (CTR/CVR), retrieval, multi-task learning and other recommendation/marketing models, with a continuously expanding model zoo.
- **Developer-friendly experience**: Supports stream preprocessing/distributed training/inference for various data formats (`csv/parquet/pathlike`), GPU acceleration and visual metric monitoring, facilitating experiments for business algorithm engineers and recommendation algorithm learners.
- **Flexible command-line tool**: Through configuring training and inference config files, start training and inference processes with one command `nextrec --mode=train --train_config=train_config.yaml`, facilitating rapid experiment iteration and agile deployment.
- **Efficient training & evaluation**: NextRec's standardized training engine comes with various optimizers, learning rate schedulers, early stopping, model checkpoints, and detailed log management built-in, ready to use out of the box.
+ - **Unified feature engineering & data pipeline**: NextRec provide unified Dense/Sparse/Sequence feature definitions, DataProcessor, and batch-optimized RecDataLoader, matching offline feature training/inference in industrial big-data settings.
+ - **Multi-scenario coverage**: Ranking (CTR/CVR), retrieval, multi-task learning, and more marketing/rec models, with a continuously expanding model zoo.
+ - **Developer-friendly experience**: `Stream processing/distributed training/inference` for `csv/parquet/pathlike` data, plus GPU/MPS acceleration and visualization support.
+ - **Efficient training & evaluation**: Standardized engine with optimizers, LR schedulers, early stopping, checkpoints, and detailed logging out of the box.
 
  ## Architecture
 
- NextRec adopts a modular and low-coupling engineering design, enabling full-pipeline reusability and scalability across data processing → model construction → training & evaluation → inference & deployment. Its core components include: a Feature-Spec-driven Embedding architecture, the BaseModel abstraction, a set of independent reusable Layers, a unified DataLoader for both training and inference, and a ready-to-use Model Zoo.
+ NextRec adopts a modular design, enabling full-pipeline reusability and scalability across data processing → model construction → training & evaluation → inference & deployment. Its core components include: a Feature-Spec-driven Embedding architecture, the BaseModel abstraction, a set of independent reusable Layers, a unified DataLoader for both training and inference, and a ready-to-use Model Zoo.
 
  ![NextRec Architecture](assets/nextrec_diagram_en.png)
 
- > The project borrows ideas from excellent open-source rec libraries. Early layers referenced [torch-rechub](https://github.com/datawhalechina/torch-rechub) but have been replaced with in-house implementations. torch-rechub remains mature in architecture and models; the author contributed a bit there—feel free to check it out.
+ > The project borrows ideas from excellent open-source rec libraries, for example: [torch-rechub](https://github.com/datawhalechina/torch-rechub). torch-rechub remains mature in architecture and models; the author contributed a bit there—feel free to check it out.
 
  ---
 
  ## Installation
 
- You can quickly install the latest NextRec via `pip install nextrec`; Python 3.10+ is required.
+ You can quickly install the latest NextRec via `pip install nextrec`; Python 3.10+ is required. If you want to run some tutorial codes, pull this project first:
 
+ ```bash
+ git clone https://github.com/zerolovesea/NextRec.git
+ cd NextRec/
+ pip install nextrec # or pip install -e .
+ ```
 
  ## Tutorials
 
- We provide multiple examples in the `tutorials/` directory, covering ranking, retrieval, multi-task, and data processing scenarios:
+ See `tutorials/` for examples covering ranking, retrieval, multi-task learning, and data processing:
+
+ - [movielen_ranking_deepfm.py](/tutorials/movielen_ranking_deepfm.py) — DeepFM training on MovieLens 100k dataset
+ - [example_ranking_din.py](/tutorials/example_ranking_din.py) — DIN Deep Interest Network training on e-commerce dataset
+ - [example_multitask.py](/tutorials/example_multitask.py) — ESMM multi-task learning training on e-commerce dataset
+ - [movielen_match_dssm.py](/tutorials/example_match_dssm.py) — DSSM retrieval model training on MovieLens 100k dataset
 
- [movielen_ranking_deepfm.py](/tutorials/movielen_ranking_deepfm.py) — DeepFM model training example on MovieLens 100k dataset
- [example_ranking_din.py](/tutorials/example_ranking_din.py) — DIN deep interest network training example on e-commerce dataset
- [example_multitask.py](/tutorials/example_multitask.py) — ESMM multi-task learning training example on e-commerce dataset
- [movielen_match_dssm.py](/tutorials/example_match_dssm.py) — DSSM retrieval model example trained on MovieLens 100k dataset
- [run_all_ranking_models.py](/tutorials/run_all_ranking_models.py) — Quickly verify the availability of all ranking models
- [run_all_multitask_models.py](/tutorials/run_all_multitask_models.py) — Quickly verify the availability of all multi-task models
- [run_all_match_models.py](/tutorials/run_all_match_models.py) — Quickly verify the availability of all retrieval models
+ - [run_all_ranking_models.py](/tutorials/run_all_ranking_models.py) — Quickly validate availability of all ranking models
+ - [run_all_multitask_models.py](/tutorials/run_all_multitask_models.py) — Quickly validate availability of all multi-task models
+ - [run_all_match_models.py](/tutorials/run_all_match_models.py) — Quickly validate availability of all retrieval models
 
- If you want to learn more details about the NextRec framework, we also provide Jupyter notebooks to help you understand:
+ To dive deeper into NextRec framework details, Jupyter notebooks are available:
 
- [How to get started with the NextRec framework](/tutorials/notebooks/en/Hands%20on%20nextrec.ipynb)
- [How to use the data processor for data preprocessing](/tutorials/notebooks/en/Hands%20on%20dataprocessor.ipynb)
+ - [Hands on the NextRec framework](/tutorials/notebooks/en/Hands%20on%20nextrec.ipynb)
+ - [Using the data processor for preprocessing](/tutorials/notebooks/en/Hands%20on%20dataprocessor.ipynb)
 
  ## 5-Minute Quick Start
 
- We provide a detailed quick start guide and paired datasets to help you become familiar with different features of the NextRec framework. We provide a test dataset from an e-commerce scenario in the `datasets/` path, with data examples as follows:
+ We provide a detailed quick-start guide and paired datasets to help you get familiar with different features of NextRec framework. In `datasets/` you'll find an e-commerce scenario test dataset like this:
 
  | user_id | item_id | dense_0 | dense_1 | dense_2 | dense_3 | dense_4 | dense_5 | dense_6 | dense_7 | sparse_0 | sparse_1 | sparse_2 | sparse_3 | sparse_4 | sparse_5 | sparse_6 | sparse_7 | sparse_8 | sparse_9 | sequence_0 | sequence_1 | label |
  |--------|---------|-------------|-------------|-------------|------------|-------------|-------------|-------------|-------------|----------|----------|----------|----------|----------|----------|----------|----------|----------|----------|-----------------------------------------------------------|-----------------------------------------------------------|-------|
  | 1 | 7817 | 0.14704075 | 0.31020382 | 0.77780896 | 0.944897 | 0.62315375 | 0.57124174 | 0.77009535 | 0.3211029 | 315 | 260 | 379 | 146 | 168 | 161 | 138 | 88 | 5 | 312 | [170,175,97,338,105,353,272,546,175,545,463,128,0,0,0] | [368,414,820,405,548,63,327,0,0,0,0,0,0,0,0] | 0 |
  | 1 | 3579 | 0.77811223 | 0.80359334 | 0.5185201 | 0.91091245 | 0.043562356 | 0.82142705 | 0.8803686 | 0.33748195 | 149 | 229 | 442 | 6 | 167 | 252 | 25 | 402 | 7 | 168 | [179,48,61,551,284,165,344,151,0,0,0,0,0,0,0] | [814,0,0,0,0,0,0,0,0,0,0,0,0,0,0] | 1 |
 
- Next, we'll use a short example to show you how to train a DIN model using NextRec. DIN (Deep Interest Network) is from Alibaba's 2018 KDD Best Paper, used for CTR prediction scenarios. You can also directly execute `python tutorials/example_ranking_din.py` to run the training and inference code.
+ Below is a short example showing how to train a DIN (Deep Interest Network) model. You can also run `python tutorials/example_ranking_din.py` directly to execute the training and inference code.
 
- After starting training, you can view detailed training logs in the `nextrec_logs/din_tutorial` path.
+ After training starts, you can find detailed training logs at `nextrec_logs/din_tutorial`.
 
  ```python
  import pandas as pd
@@ -215,6 +220,7 @@ metrics = model.evaluate(
  NextRec provides a powerful command-line interface for model training and prediction using YAML configuration files. For detailed CLI documentation, see:
 
  - [NextRec CLI User Guide](/nextrec_cli_preset/NextRec-CLI.md) - Complete guide for using the CLI
+ - [NextRec CLI Configuration Examples](/nextrec_cli_preset/) - CLI configuration file examples
 
  ```bash
  # Train a model
@@ -224,11 +230,11 @@ nextrec --mode=train --train_config=path/to/train_config.yaml
  nextrec --mode=predict --predict_config=path/to/predict_config.yaml
  ```
 
- > As of version 0.4.3, NextRec CLI supports single-machine training; distributed training features are currently under development.
+ > As of version 0.4.5, NextRec CLI supports single-machine training; distributed training features are currently under development.
 
  ## Platform Compatibility
 
- The current version is 0.4.3. All models and test code have been validated on the following platforms. If you encounter compatibility issues, please report them in the issue tracker with your system version:
+ The current version is 0.4.5. All models and test code have been validated on the following platforms. If you encounter compatibility issues, please report them in the issue tracker with your system version:
 
  | Platform | Configuration |
  |----------|---------------|
@@ -299,7 +305,7 @@ We welcome contributions of any form!
  4. Push your branch (`git push origin feature/AmazingFeature`)
  5. Open a Pull Request
 
- > Before submitting a PR, please run `python test/run_tests.py` and `python scripts/format_code.py` to ensure all tests pass and code style is unified.
+ > Before submitting a PR, please run `python test/run_tests.py` and `python scripts/format_code.py` to ensure all tests pass and code style is consistent.
 
  ### Code Style
 
{nextrec-0.4.3.dist-info → nextrec-0.4.5.dist-info}/RECORD RENAMED
@@ -1,5 +1,5 @@
  nextrec/__init__.py,sha256=_M3oUqyuvQ5k8Th_3wId6hQ_caclh7M5ad51XN09m98,235
- nextrec/__version__.py,sha256=Nyg0pmk5ea9-SLCAFEIF96ByFx4-TJFtrqYPN-Zn6g4,22
+ nextrec/__version__.py,sha256=ErkLkI2TDBX1OIqi2GGa20CPeu4ZculEi-XffRbLU6M,22
  nextrec/cli.py,sha256=b6tv7ZO7UBRVR6IfyqVP24JEcdu9-2_vV5MlfWcQucM,18468
  nextrec/basic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nextrec/basic/activation.py,sha256=uzTWfCOtBSkbu_Gk9XBNTj8__s241CaYLJk6l8nGX9I,2885
@@ -49,21 +49,21 @@ nextrec/models/ranking/fm.py,sha256=ko_Eao9UfklakEk_TVEFZSyVAojmtclo1uIMBhL4FLU,
  nextrec/models/ranking/masknet.py,sha256=IDp2XyGHdjuiUTIBv2JxNQlMw5ANdv12_9YJOX7tnzw,12367
  nextrec/models/ranking/pnn.py,sha256=twwixy26mfAVaI9AqNnMLdwOG-WtDga60xsNiyJrFjI,8174
  nextrec/models/ranking/widedeep.py,sha256=Xm2klmKBOoSKWCBQN7FhwLStu0BHSTOgAJ9kwLmtiFY,5077
- nextrec/models/ranking/xdeepfm.py,sha256=LI_cCHjfQCG9H2tQKFC7NfyrLkm8FAUyjjbLoTIIpzY,5930
+ nextrec/models/ranking/xdeepfm.py,sha256=kcPLoNC1940YxRMgWZS4mSxIXlwtc_HfNDIae_uYrsU,8156
  nextrec/utils/__init__.py,sha256=zqU9vjRUpVzJepcvdbxboik68K5jnMR40kdVjr6tpXY,2599
- nextrec/utils/config.py,sha256=KGcKA7a592FkZ5wtbDmpvIc9Fk3uedj-BtJuRk2f4t8,18088
+ nextrec/utils/config.py,sha256=zbcZtpB24FQxUPYyY1MGilzn_rinhUC_L4iaGGXqLg4,18488
  nextrec/utils/device.py,sha256=DtgmrJnVJQKtgtVUbm0SW0vZ5Le0R9HU8TsvqPnRLZc,2453
  nextrec/utils/distributed.py,sha256=tIkgUjzEjR_FHOm9ckyM8KddkCfxNSogP-rdHcVGhuk,4782
  nextrec/utils/embedding.py,sha256=YSVnBeve0hVTPSfyxN4weGCK_Jd8SezRBqZgwJAR3Qw,496
  nextrec/utils/feature.py,sha256=LcXaWP98zMZhJTKL92VVHX8mqOE5Q0MyVq3hw5Z9kxs,300
  nextrec/utils/file.py,sha256=s2cO1LRbU7xPeAbVoOA6XOoV6wvLrW6oy6p9fVSz9pc,3024
- nextrec/utils/initializer.py,sha256=GzxasKewn4C14ERNdSo9el2jEa8GXXEB2hTQnRcK2IA,2517
+ nextrec/utils/initializer.py,sha256=4E3WgZNRBmO789m-Ip-7nmt4iPRcWpWNNMuw_xn1IGE,2202
  nextrec/utils/model.py,sha256=dYl1XfIZt6aVjNyV2AAhcArwFRMcEAKrjG_pr8AVHs0,1163
  nextrec/utils/optimizer.py,sha256=eX8baIvWOpwDTGninbyp6pQfzdHbIL62GTi4ldpYcfM,2337
  nextrec/utils/synthetic_data.py,sha256=WSbC5cs7TbuDc57BCO74S7VJdlK0fQmnZA2KM4vUpoI,17566
  nextrec/utils/tensor.py,sha256=Z6MBpSuQpHw4kGjeKxG0cXZMpRBCM45zTKhk9WolyiM,2220
- nextrec-0.4.3.dist-info/METADATA,sha256=rD4niOz9T9rLsvQwcXakLQpU6Zn2Jj8BFZeGZDMhiyE,18952
- nextrec-0.4.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- nextrec-0.4.3.dist-info/entry_points.txt,sha256=NN-dNSdfMRTv86bNXM7d3ZEPW2BQC6bRi7QP7i9cIps,45
- nextrec-0.4.3.dist-info/licenses/LICENSE,sha256=2fQfVKeafywkni7MYHyClC6RGGC3laLTXCNBx-ubtp0,1064
- nextrec-0.4.3.dist-info/RECORD,,
+ nextrec-0.4.5.dist-info/METADATA,sha256=WDMMfIO6LNfmnZsSVuZHM7YYv3UJi6ZPcvcMRMlwG_4,18094
+ nextrec-0.4.5.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ nextrec-0.4.5.dist-info/entry_points.txt,sha256=NN-dNSdfMRTv86bNXM7d3ZEPW2BQC6bRi7QP7i9cIps,45
+ nextrec-0.4.5.dist-info/licenses/LICENSE,sha256=2fQfVKeafywkni7MYHyClC6RGGC3laLTXCNBx-ubtp0,1064
+ nextrec-0.4.5.dist-info/RECORD,,