divergent-beamsearch 0.1.6__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -76,6 +76,12 @@ class AcceptEverythingParser(Parser):
     def copy(self):
         return self
 
+def index_reduce_lists(x : torch.Tensor, indices : list[list[int]], reduce_func=torch.sum) -> torch.Tensor:
+    values = []
+    for i, index in enumerate(indices):
+        values.append(reduce_func(x[i, index], dim=-1))
+    return torch.tensor(values, dtype=x.dtype, device=x.device, requires_grad=x.requires_grad)
+
 @torch.no_grad()
 def divergent_beamsearch(input_ids : torch.Tensor, model : GPT2LMHeadModel, beam_size : int, max_length : int, parser : Parser, pad_token_id : int, batch_size=32, num_solutions = None, end_symb=DEFAULT_END_SYMB) -> tuple[torch.Tensor, torch.Tensor]:
     assert input_ids.shape[0] == 1, "Batch size must be 1"
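For context on the new helper: `index_reduce_lists` applies a reduction to each row of a 2-D tensor over that row's own, possibly ragged, list of column indices, something a single fancy-index plus a fixed-`dim` reduction cannot express. A minimal usage sketch with made-up values, assuming the `index_reduce_lists` definition from the hunk above is in scope:

```python
import torch

x = torch.tensor([[0.1, 0.2, 0.3],
                  [0.4, 0.5, 0.6]])

# Row 0 is reduced over columns [0, 2]; row 1 over column [1] only.
# The per-row index lists may have different lengths.
sums = index_reduce_lists(x, [[0, 2], [1]])                              # tensor([0.4, 0.5])
lses = index_reduce_lists(x, [[0, 2], [1]], reduce_func=torch.logsumexp)
```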
@@ -120,7 +126,8 @@ def divergent_beamsearch(input_ids : torch.Tensor, model : GPT2LMHeadModel, beam
 
        scores_finished_current = scores_unfinished[can_end]
        solutions_finished_current = solutions_unfinished[can_end]
-       scores_finished_current = scores_finished_current + log1mexp(logprobs[can_end, select_mask(parsers_tokens, can_end)].logsumexp(dim=-1)).squeeze(-1)
+       logprob_other_ans = index_reduce_lists(logprobs[can_end], select_mask(parsers_tokens, can_end), reduce_func=torch.logsumexp).squeeze(-1)
+       scores_finished_current = scores_finished_current + log1mexp(logprob_other_ans)
        scores_finished = torch.cat([scores_finished, scores_finished_current])
        if len(solutions_finished_current):
            pad = torch.full((len(scores_finished_current), solutions_finished_current.shape[1] - solutions_finished.shape[1]), pad_token_id, dtype=torch.long)
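This hunk replaces a rectangular fancy-index followed by `.logsumexp(dim=-1)` with the per-beam ragged reduction: `logprob_other_ans` is, for each beam that can end, the log of the total probability mass of tokens that would continue the parse, and `log1mexp` converts it into the log-probability that generation stops there. With the new helper, those per-beam token lists no longer need to have equal length. A minimal sketch of that score update in isolation, assuming a generic numerically stable `log1mexp` (the package defines its own helper with this meaning, which may differ in detail):

```python
import torch

def log1mexp(x: torch.Tensor) -> torch.Tensor:
    # Stable log(1 - exp(x)) for x < 0 (Maechler's trick); illustrative only.
    return torch.where(
        x > -0.693147,                     # roughly -log(2)
        torch.log(-torch.expm1(x)),
        torch.log1p(-torch.exp(x)),
    )

# Per finished beam: log-probability mass of tokens that would keep parsing.
logprob_other_ans = torch.tensor([-0.5, -2.0])
# Added to the beam score: log P(no parser-allowed continuation is generated).
stop_logprob = log1mexp(logprob_other_ans)   # = log(1 - exp(logprob_other_ans))
```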
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: divergent-beamsearch
-Version: 0.1.6
+Version: 0.1.7
 Summary: A variant of the beam search algorithm that focuses on finding answers that maximize the probability of generating an answer before diverging into another subject.
 License-File: LICENCE
 Requires-Python: >=3.11
@@ -0,0 +1,6 @@
+divergent_beamsearch/__init__.py,sha256=qrpVRoT3d-q1N9fJnzHI2X13e71LDY4-6eLOQ_gwCqQ,62
+divergent_beamsearch/algorithm.py,sha256=TUm2pbFhR0DqfGDm1fqQXqojNCAkFRmuvj4jbFCxwHc,9228
+divergent_beamsearch-0.1.7.dist-info/METADATA,sha256=JWuN6f2YjjOXoYxAFzR7vmVYwPyL2HDXI7huY67gAmU,2826
+divergent_beamsearch-0.1.7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+divergent_beamsearch-0.1.7.dist-info/licenses/LICENCE,sha256=gnISbTzmuQC7NwJaGOdjoq26QYgSuKndq5q2JykifKw,1075
+divergent_beamsearch-0.1.7.dist-info/RECORD,,
@@ -1,6 +0,0 @@
-divergent_beamsearch/__init__.py,sha256=qrpVRoT3d-q1N9fJnzHI2X13e71LDY4-6eLOQ_gwCqQ,62
-divergent_beamsearch/algorithm.py,sha256=73BsS5IU1_4Aj11LdQebKofpCO-Mo8BXtDfx-AEYxXA,8835
-divergent_beamsearch-0.1.6.dist-info/METADATA,sha256=cm-VyQfzk9sklvIkFXgEfI4A4ktWddIO5CvtaL7Vkng,2826
-divergent_beamsearch-0.1.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-divergent_beamsearch-0.1.6.dist-info/licenses/LICENCE,sha256=gnISbTzmuQC7NwJaGOdjoq26QYgSuKndq5q2JykifKw,1075
-divergent_beamsearch-0.1.6.dist-info/RECORD,,