metacontroller-pytorch 0.0.35-py3-none-any.whl → 0.0.37-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of metacontroller-pytorch might be problematic.

@@ -336,6 +336,8 @@ class MetaController(Module):
 
  return control_signal, MetaControllerOutput(next_hiddens, residual_stream, action_dist, sampled_latent_action, switch_beta, kl_loss, switch_loss)
 
+ MetaController.policy_loss = policy_loss
+
  # main transformer, which is subsumed into the environment after behavioral cloning
 
  Hiddens = namedtuple('Hiddens', (
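The added lines above attach a standalone policy_loss function to the class at module level, the same pattern the matching hunk for MetaControllerWithBinaryMapper uses further down. A minimal sketch of that pattern with toy stand-ins, since the body of the real policy_loss is not shown in this diff and its signature here is an assumption:

import torch
from torch.nn import Module

def policy_loss(self, log_probs, advantages):
    # hypothetical signature: a plain function written once at module level
    return -(log_probs * advantages).mean()

class MetaController(Module):
    def forward(self, x):
        return x

# assigning the function after the class definition makes it available as a
# bound method on every instance, exactly like `MetaController.policy_loss = policy_loss`
MetaController.policy_loss = policy_loss

controller = MetaController()
loss = controller.policy_loss(torch.randn(4), torch.randn(4))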
@@ -406,6 +408,7 @@ class Transformer(Module):
  meta_controller: Module | None = None,
  cache: TransformerOutput | None = None,
  discovery_phase = False,
+ force_behavior_cloning = False,
  meta_controller_temperature = 1.,
  return_raw_action_dist = False,
  return_latents = False,
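A hedged usage sketch for the new force_behavior_cloning flag, based only on the names visible in this diff (state, actions, discovery_phase, force_behavior_cloning); the positional layout of Transformer.forward and the exact return values are assumptions:

# assumed: `transformer` already has a meta controller attached at init
state_clone_loss, action_clone_loss = transformer(
    state,                           # observations
    actions = actions,               # required whenever cloning or discovery is active
    force_behavior_cloning = True    # drop the attached meta controller and train as a pure behavior cloner
)

# per the new asserts, the two phases are mutually exclusive; this would raise:
# transformer(state, actions = actions, force_behavior_cloning = True, discovery_phase = True)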
@@ -414,17 +417,25 @@ class Transformer(Module):
  ):
  device = state.device
 
+ # meta controller is either given or already given at init
+
  meta_controller = default(meta_controller, self.meta_controller)
 
- meta_controlling = exists(meta_controller)
+ if force_behavior_cloning:
+ assert not discovery_phase, 'discovery phase cannot be set to True if force behavioral cloning is set to True'
+ meta_controller = None
+
+ has_meta_controller = exists(meta_controller)
 
- behavioral_cloning = not meta_controlling and not return_raw_action_dist
+ assert not (discovery_phase and not has_meta_controller), 'meta controller must be made available during discovery phase'
+
+ behavioral_cloning = force_behavior_cloning or (not has_meta_controller and not return_raw_action_dist)
 
  # by default, if meta controller is passed in, transformer is no grad
 
- lower_transformer_context = nullcontext if not meta_controlling else torch.no_grad
- meta_controller_context = nullcontext if meta_controlling else torch.no_grad
- upper_transformer_context = nullcontext if (not meta_controlling or discovery_phase) else torch.no_grad
+ lower_transformer_context = nullcontext if not has_meta_controller else torch.no_grad
+ meta_controller_context = nullcontext if has_meta_controller else torch.no_grad
+ upper_transformer_context = nullcontext if (not has_meta_controller or discovery_phase) else torch.no_grad
 
  # handle cache
 
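The three context selections above decide which sub-modules receive gradients. A self-contained restatement of that gating, reproduced from the added lines (only the wrapper function is new):

from contextlib import nullcontext
import torch

def grad_contexts(has_meta_controller: bool, discovery_phase: bool):
    # lower transformer: frozen whenever a meta controller is present
    lower_transformer_context = nullcontext if not has_meta_controller else torch.no_grad
    # meta controller: only trains when it is actually present
    meta_controller_context = nullcontext if has_meta_controller else torch.no_grad
    # upper transformer: trains when there is no meta controller, or during the discovery phase
    upper_transformer_context = nullcontext if (not has_meta_controller or discovery_phase) else torch.no_grad
    return lower_transformer_context, meta_controller_context, upper_transformer_context

# e.g. during discovery, the meta controller and upper transformer get gradients,
# while the lower transformer stays frozen under torch.no_grad
lower, meta, upper = grad_contexts(has_meta_controller = True, discovery_phase = True)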
@@ -432,7 +443,8 @@ class Transformer(Module):
 
  # handle maybe behavioral cloning
 
- if behavioral_cloning or (meta_controlling and discovery_phase):
+ if behavioral_cloning or discovery_phase: # during behavior cloning and discovery phase, the network is predicting / reconstructing the next token
+
  assert exists(actions), f'`actions` cannot be empty when doing discovery or behavioral cloning'
 
  state, target_state = state[:, :-1], state[:, 1:]
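In the cloning / discovery branch the sequence is shifted by one so the network predicts the next timestep. A toy illustration of that slicing (the tensor shapes here are made up):

import torch

state = torch.randn(2, 6, 16)                         # (batch, time, dim), shapes assumed
state, target_state = state[:, :-1], state[:, 1:]     # inputs at step t predict observations at t + 1

assert state.shape[1] == target_state.shape[1] == 5   # one step shorter on both sides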
@@ -465,7 +477,7 @@ class Transformer(Module):
 
  with meta_controller_context():
 
- if exists(meta_controller):
+ if exists(meta_controller) and not behavioral_cloning:
  control_signal, next_meta_hiddens = meta_controller(residual_stream, cache = meta_hiddens, discovery_phase = discovery_phase, temperature = meta_controller_temperature, episode_lens = episode_lens)
  else:
  control_signal, next_meta_hiddens = self.zero, None
@@ -495,7 +507,7 @@ class Transformer(Module):
 
  return state_clone_loss, action_clone_loss
 
- elif meta_controlling and discovery_phase:
+ elif discovery_phase:
 
  action_recon_loss = self.action_readout.calculate_loss(dist_params, target_actions)
 
@@ -28,7 +28,7 @@ from torch_einops_utils.save_load import save_load
 
  from vector_quantize_pytorch import BinaryMapper
 
- from metacontroller.metacontroller import MetaControllerOutput
+ from metacontroller.metacontroller import MetaControllerOutput, policy_loss
 
  # constants
 
@@ -170,7 +170,7 @@ class MetaControllerWithBinaryMapper(Module):
  action_log_probs = log_probs.gather(-1, codes)
  action_log_probs = rearrange(action_log_probs, '... 1 -> ...')
 
- return action_log_probs.sum(dim = -1)
+ return action_log_probs
 
  def forward(
  self,
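The binary-mapper variant previously summed the per-code log probabilities over the last dimension before returning; after this change it returns them unreduced, presumably leaving any aggregation to the shared policy_loss. A toy illustration of the gather that precedes the changed return (shapes and the number of codes are assumptions):

import torch
from einops import rearrange

log_probs = torch.randn(2, 4, 2).log_softmax(dim = -1)   # (batch, num binary codes, 2), toy shapes
codes = torch.randint(0, 2, (2, 4, 1))                    # sampled 0 / 1 code per position

action_log_probs = log_probs.gather(-1, codes)                    # log prob of the sampled bit
action_log_probs = rearrange(action_log_probs, '... 1 -> ...')    # (batch, num binary codes)

# before 0.0.37 this was reduced here with .sum(dim = -1);
# now the unreduced per-code log probs are handed back to the caller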
@@ -302,3 +302,5 @@ class MetaControllerWithBinaryMapper(Module):
  switch_beta = rearrange(switch_beta, '... 1 -> ...')
 
  return control_signal, MetaControllerOutput(next_hiddens, residual_stream, binary_logits, sampled_codes, switch_beta, kl_loss, switch_loss)
+
+ MetaControllerWithBinaryMapper.policy_loss = policy_loss
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: metacontroller-pytorch
- Version: 0.0.35
+ Version: 0.0.37
  Summary: Transformer Metacontroller
  Project-URL: Homepage, https://pypi.org/project/metacontroller/
  Project-URL: Repository, https://github.com/lucidrains/metacontroller
@@ -39,7 +39,7 @@ Requires-Dist: discrete-continuous-embed-readout>=0.1.12
  Requires-Dist: einops>=0.8.1
  Requires-Dist: einx>=0.3.0
  Requires-Dist: loguru
- Requires-Dist: memmap-replay-buffer>=0.0.23
+ Requires-Dist: memmap-replay-buffer>=0.0.25
  Requires-Dist: torch-einops-utils>=0.0.19
  Requires-Dist: torch>=2.5
  Requires-Dist: vector-quantize-pytorch>=1.27.20
@@ -0,0 +1,8 @@
+ metacontroller/__init__.py,sha256=lj7IOGpN--qMxJWbB-4SGqoPXG7Hd4mgtToTRSyTZ58,57
+ metacontroller/metacontroller.py,sha256=3cVg4TkfD8bkuED0mcGcfAEjJujcJ9tf_qMB8ict12c,17017
+ metacontroller/metacontroller_with_binary_mapper.py,sha256=odZs49ZWY7_FfEweYkD0moX7Vn0jGd91FjFTxzjLyr8,9480
+ metacontroller/transformer_with_resnet.py,sha256=R49ycusbq3kEX97WHZ41WY2ONc2mYPOuRUCmaFcBOEo,5546
+ metacontroller_pytorch-0.0.37.dist-info/METADATA,sha256=4mkDBWI-ma5TR38PpAIxEKj6VlVlQOYvJQAGVswQ3IQ,4747
+ metacontroller_pytorch-0.0.37.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ metacontroller_pytorch-0.0.37.dist-info/licenses/LICENSE,sha256=1yCiA9b5nhslTavxPjsQAO-wpOnwJR9-l8LTVi7GJuk,1066
+ metacontroller_pytorch-0.0.37.dist-info/RECORD,,
@@ -1,8 +0,0 @@
- metacontroller/__init__.py,sha256=lj7IOGpN--qMxJWbB-4SGqoPXG7Hd4mgtToTRSyTZ58,57
- metacontroller/metacontroller.py,sha256=sj-cHpYm9NHZEBKbLQaf4MtZCv2lcBI2cAyj5Y9bAgc,16410
- metacontroller/metacontroller_with_binary_mapper.py,sha256=9mMKMp3zVQzjbJvoC1dBRibarHHgjnOf1tRyeY1VvAM,9423
- metacontroller/transformer_with_resnet.py,sha256=R49ycusbq3kEX97WHZ41WY2ONc2mYPOuRUCmaFcBOEo,5546
- metacontroller_pytorch-0.0.35.dist-info/METADATA,sha256=eV2Y0yW-iY2_I0gPyCA8OqChqVWFwh3GkJZFzQcZ2a0,4747
- metacontroller_pytorch-0.0.35.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- metacontroller_pytorch-0.0.35.dist-info/licenses/LICENSE,sha256=1yCiA9b5nhslTavxPjsQAO-wpOnwJR9-l8LTVi7GJuk,1066
- metacontroller_pytorch-0.0.35.dist-info/RECORD,,