evolutionary-policy-optimization 0.2.7__py3-none-any.whl → 0.2.9__py3-none-any.whl

This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
evolutionary_policy_optimization/epo.py

@@ -167,6 +167,9 @@ def shrink_and_perturb_(
 
     assert 0. <= shrink_factor <= 1.
 
+    device = next(module.parameters()).device
+    maybe_sync_seed(device)
+
     for p in module.parameters():
         noise = torch.randn_like(p.data)
         p.data.mul_(1. - shrink_factor).add_(noise * perturb_factor)
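
The two added lines above are the substance of the 0.2.9 change: before perturbing, the function now resolves the module's device and calls maybe_sync_seed, so that in a multi-process run every rank draws the same noise in torch.randn_like and the replicated parameters stay identical after the shrink-and-perturb step. The helper itself lives in evolutionary_policy_optimization/distributed.py and is not shown in this diff; the sketch below is only an assumption about its contract (the name maybe_sync_seed_sketch, the broadcast-from-rank-0 strategy, and max_seed are all hypothetical, not the package's actual implementation).

import torch
import torch.distributed as dist

def maybe_sync_seed_sketch(device, max_seed = int(1e6)):
    # hypothetical sketch, not the real maybe_sync_seed:
    # no-op outside of a distributed run, hence the "maybe"
    if not (dist.is_available() and dist.is_initialized()):
        return

    # rank 0 draws a seed; broadcasting it lets every rank seed the
    # global RNG identically before the shared random perturbation
    seed = torch.randint(0, max_seed, (1,), device = device)
    dist.broadcast(seed, src = 0)
    torch.manual_seed(seed.item())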
@@ -383,6 +386,7 @@ class StateNorm(Module):
         return normed
 
 # style mapping network from StyleGAN2
+# https://arxiv.org/abs/1912.04958
 
 class EqualLinear(Module):
     def __init__(
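
This hunk only adds the paper reference for the StyleGAN2 style mapping network that EqualLinear implements; no behavior changes. For context, the standard equalized-learning-rate linear layer from that paper keeps unit-variance weights and applies the He-init style scaling at runtime instead of at initialization, so the effective per-layer step size of the optimizer stays comparable. The sketch below illustrates that standard technique under those assumptions; it is not epo.py's actual EqualLinear, and the lr_mul default is an assumption.

import math
import torch
from torch import nn
import torch.nn.functional as F

class EqualLinearSketch(nn.Module):
    # illustrative equalized-learning-rate linear layer, assuming the
    # standard StyleGAN2 formulation; epo.py's EqualLinear may differ
    def __init__(self, dim_in, dim_out, lr_mul = 0.01):
        super().__init__()
        # store unit-variance weights; scaling happens in forward,
        # which is what equalizes the effective learning rate
        self.weight = nn.Parameter(torch.randn(dim_out, dim_in) / lr_mul)
        self.bias = nn.Parameter(torch.zeros(dim_out))
        self.scale = lr_mul / math.sqrt(dim_in)
        self.lr_mul = lr_mul

    def forward(self, x):
        return F.linear(x, self.weight * self.scale, self.bias * self.lr_mul)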
@@ -851,8 +855,11 @@ class LatentGenePool(Module):
         inplace = True,
         migrate = None # trigger a migration in the setting of multiple islands, the loop outside will need to have some `migrate_every` hyperparameter
     ):
+
         device = self.latents.device
 
+        maybe_sync_seed(device)
+
        if not divisible_by(self.step.item(), self.apply_genetic_algorithm_every):
            self.advance_step_()
            return
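
Same pattern as in shrink_and_perturb_: the genetic algorithm step mutates latents stochastically, so the seed is synchronized up front. Note that the call sits before the divisible_by early-return, so ranks advance their RNG state in lockstep even on steps where the genetic algorithm is skipped. The migrate comment says the caller is expected to own a migrate_every hyperparameter; below is a hypothetical outer loop under that reading, where gene_pool, evaluate_fitness, num_steps, and the method name genetic_algorithm_step are all stand-ins rather than confirmed API.

def train_loop_sketch(gene_pool, evaluate_fitness, num_steps, migrate_every = 10):
    # hypothetical loop: gene_pool stands in for a LatentGenePool instance,
    # evaluate_fitness for the real fitness signal from environment rollouts
    for step in range(1, num_steps + 1):
        fitness = evaluate_fitness()

        # trigger inter-island migration only every migrate_every steps
        gene_pool.genetic_algorithm_step(
            fitness,
            migrate = (step % migrate_every) == 0
        )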
evolutionary_policy_optimization-0.2.9.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: evolutionary-policy-optimization
-Version: 0.2.7
+Version: 0.2.9
 Summary: EPO - Pytorch
 Project-URL: Homepage, https://pypi.org/project/evolutionary-policy-optimization/
 Project-URL: Repository, https://github.com/lucidrains/evolutionary-policy-optimization
evolutionary_policy_optimization-0.2.9.dist-info/RECORD

@@ -1,10 +1,10 @@
 evolutionary_policy_optimization/__init__.py,sha256=NyiYDYU7DlpmOTM7xiBQET3r1WwX0ebrgMCBLSQrW3c,288
 evolutionary_policy_optimization/distributed.py,sha256=MxyxqxANAuOm8GYb0Yu09EHd_aVLhK2uwgrfuVWciPU,2342
 evolutionary_policy_optimization/env_wrappers.py,sha256=bDL06o9_b1iW6k3fw2xifnOnYlzs643tdW6Yv2gsIdw,803
-evolutionary_policy_optimization/epo.py,sha256=OKumVrOH7DSKZhbbx-5oCI_JJwJNYf4lrEpCNmbj6ZY,52991
+evolutionary_policy_optimization/epo.py,sha256=adUHDtgrXnSOoPLnbPy6xxGl6QLYxbN1mB_sl2KPwgI,53135
 evolutionary_policy_optimization/experimental.py,sha256=7LOrMIaU4fr2Vme1ZpHNIvlvFEIdWj0-uemhQoNJcPQ,5549
 evolutionary_policy_optimization/mock_env.py,sha256=TLyyRm6tOD0Kdn9QqJJQriaSnsR-YmNQHo4OohmZFG4,1410
-evolutionary_policy_optimization-0.2.7.dist-info/METADATA,sha256=MiG_AYp6KoANhdGuaGM37-zaciW8dCrO0KvkXk7hO7w,9171
-evolutionary_policy_optimization-0.2.7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-evolutionary_policy_optimization-0.2.7.dist-info/licenses/LICENSE,sha256=1yCiA9b5nhslTavxPjsQAO-wpOnwJR9-l8LTVi7GJuk,1066
-evolutionary_policy_optimization-0.2.7.dist-info/RECORD,,
+evolutionary_policy_optimization-0.2.9.dist-info/METADATA,sha256=sf6iwunGaiuum2yoHwFpanhi9JGwNg621VyZ9H7z_yI,9171
+evolutionary_policy_optimization-0.2.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+evolutionary_policy_optimization-0.2.9.dist-info/licenses/LICENSE,sha256=1yCiA9b5nhslTavxPjsQAO-wpOnwJR9-l8LTVi7GJuk,1066
+evolutionary_policy_optimization-0.2.9.dist-info/RECORD,,