rxnn 0.2.41__tar.gz → 0.2.42__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {rxnn-0.2.41 → rxnn-0.2.42}/PKG-INFO +1 -1
- {rxnn-0.2.41 → rxnn-0.2.42}/pyproject.toml +1 -1
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/mrl.py +2 -2
- {rxnn-0.2.41 → rxnn-0.2.42}/LICENSE +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/README.md +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/.DS_Store +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/__init__.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/experimental/__init__.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/experimental/attention.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/experimental/models.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/experimental/moe.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/memory/__init__.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/memory/attention.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/memory/norm.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/memory/stm.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/rxt/__init__.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/rxt/models.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/__init__.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/base.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/bml.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/callbacks.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/dataset.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/ddp.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/models.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/reward.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/rl.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/scheduler.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/tokenizer.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/training/utils.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/__init__.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/attention.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/ff.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/layers.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/mask.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/models.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/moe.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/positional.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/transformers/sampler.py +0 -0
- {rxnn-0.2.41 → rxnn-0.2.42}/src/rxnn/utils.py +0 -0
src/rxnn/training/mrl.py

```diff
@@ -1022,7 +1022,7 @@ class MRLTrainer:
 
         return (epochs, unfreeze_epoch), (random_resets, random_resets_from, random_resets_ratio)
 
-    def __call__(self, curriculum_config: list[CurriculumConfig], batch_size: int):
+    def __call__(self, curriculum_config: list[CurriculumConfig], batch_size: int, ddp_find_unused_parameters: bool = False):
         """Start Memory Reinforcement Learning Curriculum."""
 
         # 0. Set global epoch count for all stages
@@ -1033,7 +1033,7 @@ class MRLTrainer:
         if self.use_ddp:
             rank, world_size = get_os_ddp_config()
             dist.init_process_group(backend='nccl', rank=rank, world_size=world_size)
-            self.actor = DistributedDataParallel(self.actor, device_ids=[self.device.index])
+            self.actor = DistributedDataParallel(self.actor, device_ids=[self.device.index], find_unused_parameters=ddp_find_unused_parameters)
             self.critic = DistributedDataParallel(self.critic, device_ids=[self.device.index])
 
         # 2. Init BatchSampler with actor model (we have to run it after DDP init)
```