divergent-beamsearch 0.2.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
divergent_beamsearch/algorithm.py
@@ -1,6 +1,9 @@
 import math
 import torch
-from transformers import GPT2LMHeadModel
+try:
+    from transformers import GPT2LMHeadModel
+except ImportError:
+    pass
 from multi_choices_parser import DEFAULT_END_SYMB
 
 
@@ -35,7 +38,7 @@ def apply_mask_tokens(pred : torch.Tensor, parsers_tokens):
     return pred[~pred.isinf().all(dim=-1)]
 
 
-def batched_inference_logits(model : GPT2LMHeadModel, input_ids : torch.Tensor,
+def batched_inference_logits(model : "GPT2LMHeadModel", input_ids : torch.Tensor,
                              attention_mask : torch.Tensor | None = None, batch_size : int = 32,
                              to_cpu=False) -> torch.Tensor:
     logits = []
@@ -96,7 +99,7 @@ def pad_to_same_size(tensors : list[torch.Tensor], padding_value : int) -> torch
     return torch.cat(padded_tensors, dim=0)
 
 @torch.no_grad()
-def divergent_beamsearch(input_ids : torch.Tensor, model : GPT2LMHeadModel, beam_size : int,
+def divergent_beamsearch(input_ids : torch.Tensor, model : "GPT2LMHeadModel", beam_size : int,
                          max_length : int, parser : Parser, pad_token_id : int, batch_size=32,
                          num_solutions = None, end_symb=DEFAULT_END_SYMB, optimize_gpu_mem=True) -> tuple[torch.Tensor, torch.Tensor]:
     assert input_ids.shape[0] == 1, "Batch size must be 1"
@@ -180,7 +183,7 @@ def set_slice_row(x : torch.Tensor, slices : torch.IntTensor, value) -> torch.Te
         x[i].index_fill_(0, indices[i], 0)
 
 @torch.no_grad()
-def divergent_logprob(input_ids : torch.Tensor, attention_mask : torch.Tensor | None, model : GPT2LMHeadModel,
+def divergent_logprob(input_ids : torch.Tensor, attention_mask : torch.Tensor | None, model : "GPT2LMHeadModel",
                       parsers : Parser | list[Parser] | None, batch_size=32,
                       start : int | torch.IntTensor = None, end_symb=DEFAULT_END_SYMB, optimize_gpu_mem=True) -> torch.FloatTensor:
     if start is None:
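
Taken together, these hunks make transformers an optional dependency of algorithm.py: the import is wrapped in try/except ImportError, and the GPT2LMHeadModel annotations are quoted so Python stores them as strings instead of evaluating them when the functions are defined. Without the quotes, importing the module with transformers absent would raise a NameError at each def. A minimal sketch of the pattern, with an illustrative function body rather than the package's actual implementation:

import torch

try:
    # Optional heavy dependency: the module must still import when it is missing.
    from transformers import GPT2LMHeadModel
except ImportError:
    pass

def batched_inference_logits(model : "GPT2LMHeadModel", input_ids : torch.Tensor,
                             batch_size : int = 32) -> torch.Tensor:
    # The quoted annotation is never looked up at definition time, so this
    # def succeeds even when the except branch above swallowed the import.
    logits = []
    for i in range(0, input_ids.shape[0], batch_size):
        logits.append(model(input_ids[i:i + batch_size]).logits)
    return torch.cat(logits, dim=0)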
divergent_beamsearch-0.2.0.dist-info/METADATA → divergent_beamsearch-0.2.1.dist-info/METADATA
@@ -1,12 +1,11 @@
 Metadata-Version: 2.4
 Name: divergent-beamsearch
-Version: 0.2.0
+Version: 0.2.1
 Summary: A variant of the beam search algorithm that focuses on finding answers that maximize the probability of generating an answer before diverging into another subject.
 License-File: LICENCE
 Requires-Python: >=3.11
-Requires-Dist: multi-choices-parser>=0.9.61
+Requires-Dist: multi-choices-parser>=0.9.72
 Requires-Dist: torch>=2.0.0
-Requires-Dist: transformers>=4.47.1
 Description-Content-Type: text/markdown
 
 # Divergent Beam Search
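
The metadata change is consistent with that: Requires-Dist: transformers>=4.47.1 is dropped and the multi-choices-parser floor is raised, so installing the wheel no longer pulls transformers in automatically. A hedged sketch of a guard a caller might add before using the model-dependent functions (the error message is illustrative, not part of the package):

import importlib.util

# transformers is now optional at install time, so check for it explicitly
# before calling functions annotated with "GPT2LMHeadModel".
if importlib.util.find_spec("transformers") is None:
    raise ImportError("install transformers to run GPT2-based inference")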
divergent_beamsearch-0.2.1.dist-info/RECORD (added)
@@ -0,0 +1,6 @@
+divergent_beamsearch/__init__.py,sha256=qrpVRoT3d-q1N9fJnzHI2X13e71LDY4-6eLOQ_gwCqQ,62
+divergent_beamsearch/algorithm.py,sha256=GKFwi6aKNmJRu9SR6X96JT93SbOpy84fxyKJ5Pq5vQs,9961
+divergent_beamsearch-0.2.1.dist-info/METADATA,sha256=0JAVae-tlHYFQkaEqBOE9ZDtExKsS-gpFFFb9oNTRdg,2790
+divergent_beamsearch-0.2.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+divergent_beamsearch-0.2.1.dist-info/licenses/LICENCE,sha256=gnISbTzmuQC7NwJaGOdjoq26QYgSuKndq5q2JykifKw,1075
+divergent_beamsearch-0.2.1.dist-info/RECORD,,
divergent_beamsearch-0.2.0.dist-info/RECORD (removed)
@@ -1,6 +0,0 @@
-divergent_beamsearch/__init__.py,sha256=qrpVRoT3d-q1N9fJnzHI2X13e71LDY4-6eLOQ_gwCqQ,62
-divergent_beamsearch/algorithm.py,sha256=lx27rXddHiyzisINgWI5MuatRLIU2ObnZhtCvojbGJ8,9917
-divergent_beamsearch-0.2.0.dist-info/METADATA,sha256=u4-bH-9qa_yLJPemATIemwIavOCucF7CCv0kyJV6_Qg,2826
-divergent_beamsearch-0.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-divergent_beamsearch-0.2.0.dist-info/licenses/LICENCE,sha256=gnISbTzmuQC7NwJaGOdjoq26QYgSuKndq5q2JykifKw,1075
-divergent_beamsearch-0.2.0.dist-info/RECORD,,