x-transformers 2.1.14.tar.gz → 2.1.15.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. {x_transformers-2.1.14 → x_transformers-2.1.15}/PKG-INFO +1 -1
  2. {x_transformers-2.1.14 → x_transformers-2.1.15}/pyproject.toml +1 -1
  3. {x_transformers-2.1.14 → x_transformers-2.1.15}/tests/test_x_transformers.py +1 -1
  4. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/belief_state_wrapper.py +3 -3
  5. {x_transformers-2.1.14 → x_transformers-2.1.15}/.github/FUNDING.yml +0 -0
  6. {x_transformers-2.1.14 → x_transformers-2.1.15}/.github/workflows/python-publish.yml +0 -0
  7. {x_transformers-2.1.14 → x_transformers-2.1.15}/.github/workflows/python-test.yaml +0 -0
  8. {x_transformers-2.1.14 → x_transformers-2.1.15}/.gitignore +0 -0
  9. {x_transformers-2.1.14 → x_transformers-2.1.15}/LICENSE +0 -0
  10. {x_transformers-2.1.14 → x_transformers-2.1.15}/README.md +0 -0
  11. {x_transformers-2.1.14 → x_transformers-2.1.15}/data/README.md +0 -0
  12. {x_transformers-2.1.14 → x_transformers-2.1.15}/data/enwik8.gz +0 -0
  13. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/all-attention.png +0 -0
  14. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/attention-on-attention.png +0 -0
  15. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/cosine-sim-attention.png +0 -0
  16. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/deepnorm.png +0 -0
  17. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/dynamic-pos-bias-linear.png +0 -0
  18. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/dynamic-pos-bias-log.png +0 -0
  19. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/dynamic-pos-bias-sinusoidal.png +0 -0
  20. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/dynamic-pos-bias.png +0 -0
  21. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/enhanced-recurrence.png +0 -0
  22. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/fcm.png +0 -0
  23. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/ffglu.png +0 -0
  24. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/flash-attention.png +0 -0
  25. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/gate_values.png +0 -0
  26. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/gating.png +0 -0
  27. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/length-extrapolation-scale.png +0 -0
  28. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/macaron-1.png +0 -0
  29. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/macaron-2.png +0 -0
  30. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/memory-transformer.png +0 -0
  31. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/normformer.png +0 -0
  32. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/pia.png +0 -0
  33. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/qknorm-analysis.png +0 -0
  34. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/resi_dual.png +0 -0
  35. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/residual_attn.png +0 -0
  36. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/rezero.png +0 -0
  37. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/rotary.png +0 -0
  38. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/sandwich-2.png +0 -0
  39. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/sandwich.png +0 -0
  40. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/sandwich_norm.png +0 -0
  41. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/scalenorm.png +0 -0
  42. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/talking-heads.png +0 -0
  43. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/topk-attention.png +0 -0
  44. {x_transformers-2.1.14 → x_transformers-2.1.15}/images/xval.png +0 -0
  45. {x_transformers-2.1.14 → x_transformers-2.1.15}/train_copy.py +0 -0
  46. {x_transformers-2.1.14 → x_transformers-2.1.15}/train_enwik8.py +0 -0
  47. {x_transformers-2.1.14 → x_transformers-2.1.15}/train_length_extrapolate.py +0 -0
  48. {x_transformers-2.1.14 → x_transformers-2.1.15}/train_parity.py +0 -0
  49. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/__init__.py +0 -0
  50. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/attend.py +0 -0
  51. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/autoregressive_wrapper.py +0 -0
  52. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/continuous.py +0 -0
  53. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/dpo.py +0 -0
  54. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/multi_input.py +0 -0
  55. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/neo_mlp.py +0 -0
  56. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/nonautoregressive_wrapper.py +0 -0
  57. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/x_transformers.py +0 -0
  58. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  59. {x_transformers-2.1.14 → x_transformers-2.1.15}/x_transformers/xval.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: x-transformers
-Version: 2.1.14
+Version: 2.1.15
 Summary: X-Transformers
 Project-URL: Homepage, https://pypi.org/project/x-transformers/
 Project-URL: Repository, https://github.com/lucidrains/x-transformers
pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "x-transformers"
-version = "2.1.14"
+version = "2.1.15"
 description = "X-Transformers"
 authors = [
     { name = "Phil Wang", email = "lucidrains@gmail.com" }
tests/test_x_transformers.py
@@ -739,5 +739,5 @@ def test_belief_state_wrapper(
     if goal_suffix:
         suffix = torch.randint(0, 20000, (2, 2))

-    sampled = model.generate_with_suffix_token_only(seq[:, :1], 16, suffix = suffix)
+    sampled = model.generate_with_suffix_cond(seq[:, :1], 16, suffix = suffix)
     assert sampled.shape == (2, 16)
x_transformers/belief_state_wrapper.py
@@ -1,7 +1,7 @@

 # Belief State Transformer

-# https://arxiv.org/abs/2410.23506
+# Hu et al. https://arxiv.org/abs/2410.23506
 # https://www.youtube.com/watch?v=aqhbRtB2Fyg

 from __future__ import annotations
@@ -107,7 +107,7 @@ class BeliefStateWrapper(Module):

     @torch.no_grad()
     @eval_decorator
-    def generate_with_suffix_token_only(
+    def generate_with_suffix_cond(
         self,
         prompts,
         seq_len,
@@ -148,7 +148,7 @@ class BeliefStateWrapper(Module):
                 return_embeddings = True
             )

-            # pick out the last embedding for fill in the model
+            # pick out the last embedding for fill in the middle

             suffix_embed = suffix_embed[:, -1:]
